[Binary archive content: ustar tar archive of Zuul CI job output, containing the directories var/home/core/zuul-output/ and var/home/core/zuul-output/logs/, and the gzip-compressed log file var/home/core/zuul-output/logs/kubelet.log.gz. The compressed payload is binary data and is not recoverable as text.]
1Jci8͝0^JʮfU$;a!U @ELQ+V12ȸo1vwv~|0 ЙȆwOmiG47pb׀D PS 6l "4J곷7kt2|?Gܯͥ_},Yj_;J>MKFs"5rˌ:Qe<Ie@!eߕ5gg YY/R^@Lǀ?`@ehjE!ظ)k+. =8A=TV vCNha~*e&|œI&jw{RиPArw'=<z;_;Dv<;_0g[XRDkHǹ?^B+GƝ b -mƽ(a\qɈZ !#pRQ"` baF{Sww_ kN+Iv ),8&!ĵtxNs_ AjDXs{Xf3gNɻ,)>.vX E!5[4%7W%_Zp8B3P?*ELBi:PK(e[aYK'b)RTis8ʱ+ٝVv ڟL`Z\,0^3@|10]tLi$}t#j ŠTi M5Xh$uAsEJxn9gTpr6oر_ -;@b, Ba}Ȯ3B ZrdU'lR”j.z6o@+Gu[\\j@6s\ZNG*ŸGT 0"8Bsx9+w95('`Dp<%Az%ZL\zZ듓ރuZ&Tk&!ֹRx6k8w0.΋[>z,"+o||i#LjL-İ;k #z)<`ՃD,_kP)ikCW,]kpxE\f`!0ip9>ſ,v2)%>I>G0Q-TdXyi ;JdMAژƴ\aNO 紃Sޛer^6Q'-DG~ע`8ՃgxMtS8:rU0:_Xh<# R q2X: gr5XV 4_ L + CE3,we"T XJњT D=<,SLe&DJe:P & [Ȭ2ŻE-oD@8:R/ez"ٞw#o_G>Nι$SQDҋT2.I8߻wW]2/pc>>S .Oן|ӫ5&23?,*MPD':<Ҳ&:'DȀx*=|G~ͻ>%Z^^0+DFiCtIĽuicC &p97f\h{?R`պ [᜖`p7ޣ Lqi'ܘӀW٢c3C~&e/oCEAĻ۔R¤;n0Q&*3 H2AyPn!@2i[-M6.XHD*j:x\9& X<D)ջGZSw-s汁Ӷ@bע2عkgD|&&arUFO4C!\1&A=<k<$·DjT 8H0,J[,QV{O1nAVqnŨPb8o&j 8 [Rh.l9d:@Rql ^qjp&Xyh$k 4EV _L.CY@0Ǥq@i@7xޑԣ!6QmAZ@( W òD9)۸iP܊Ge*zAK\^b\* >컏[\#z\ID 5zg]۫GО%w7ay4;4Vsg -6{7/YyACr] 嶧"8oA ?YkBj]HS,@Zʱ%[&^Fx@_~CB4]])ẇTM?|(Nbu`~`R{Tם.uhz˷ 4?\V6ۅwG@k%Inb1c8'O-r;#*PG|d/鮴{PPSAp¸;7@x`K'[Ht vJ &DG▮a\rn}D\+ y`5%ZTG#7]B y~fi;feԿ/E9MQY'O?I_le8dv;;_va5g$93;WeI1$6ؖzcH]]eJ8Wݯݯ\0V7o">}:{$-Bqvv) l=BnE7- ΅ٹW.B#s@@s#he'j}]ȢWZ&q88lqjLp`q#`K`Bڐo$Ú/tFE:Rry z9DKVXs;W \|Qܬ=]a77PMwb3P5W ?ec`4#r±[έD-oB Ήbcar6AOܬnxJ0^#Z!!p{j^Y{o%ӣNV/,>&,r\'q7-عe;7]F[0bMѵx[hipv-(pB _.8^{Qcقy2yq0`>+سTXyR"jm \UqGq R1?TJ\@+VRPd̾O[1p<-wZ}'Giy'¢Qq>jK[Oގvc+|GPnΎƋ,UQmg$5݄_q؝j#zs-%F02\97$ǑCN#?6(~L#sUoj|:y54Cc z _ ݍSQa6!x.+0F`ȃSA` cFItZAInN_^7X̜Sz l-כErm* /n<^ξjp8-O auv..noŶbYK}9#RS55>/].~Uԧi}VeW^r+d6qdUUōkqW)S(jj2jOTFB  4Ф O CM'Y_wߓ#rq͈"F UsΣ@yOt`^\U'T4:U~:U#}|ʢeE :*ERƄ Ty C=<2 4\^5ZkK7ų7*4r^ƌ~q 4RI:6HH`Jr>JIB8EDpjb9gZ,R`{E ͇2\Rݿ_3TSbD39ÃpYnvsaR;

'dh:qDȈKR1J&%6ПH^l3iFGNrj,F #ZL;ƃsc  s㝞{M fTϡ.X8$IĠZ$hG} \fŨ2\HJGv̻uQ_`|ěX祮 ѯ`)a`-\XA֛[9M(1̖Eq!:q8ƛ`iV̓v.skx;e1:#}|=,hc''wD|Z6")|4\-`m9|ke6;rը4#zvh ɳZy FP޳Qgij&ϺBWճS@iR1t sуpp1>cP0 dAl6ֺ!ֿODZdxZd ?P`[13SB"-IV 2?yQ>HS9zJ\4sȚKkyh5j~?[s1ed"a')fbzo"O};KV»}"pЬHvOUK*ȐY\B() \riB)-$2eƃ sj3Vk| 5%@U_C50Ek `.:*/[ڢ#AԖ "](B}N(|j͕ZZ;1û1 (2 "2B3#F Ԓ I@1IZj\rzo]xk+a)*7Rk{5p9ϞRT3i8AO^ֳTfBt6&lݫ,-̶|6K5;[pe|5y-awwPFPj1&٫p jP/wqt:ݳD(ݩ@u1&\A_LkEu ^o) WqPeOhMq6 VMwAƻ}Kԍ<͡[5ޭ Mʦ42~ϻ q ޭV%SGv­iޭRև|&ZcSQSwNL5%s&9Q`rQ%3zÃ`%OJy E$P\?^7) ù$x!'%m"LF% i(||f#R)/2(jhkNxK}Fba҇|&ZcSk2b$ObuV+өw;at:U|4VhwBr)N#}&wATʻp:weػ`!_v)MK5 U6X3}"lmE0P!HNNZ 9p'(ӘF8z%3'yL hCAG?n-3Lh}k_mJP?>w9o]zE4k|y^׹_ 6<{g3uFn"n_\SF8xɤ2|K4 `"ο<27߯Mv4anڎ.gې\(`6oe:>7nBuM^sBn@̈J\K.Dz+yY廨>]+1K1JcXS/>,rE,q.y!bl4^+,"ac"׌L MHt')ڧ*D[[Ruɒ"%/iy US =Oqy4kSHSfbұMk/r0i⑏i|+f5>vO%=8 bvL9jVuETm-vu P*jXj9,uztwyRh.^0'(⚽NQ)oԳxCoݴ}*=LCV<>FTyf<-΍Z(6%Gƛ8'?m`9^Sm,+ rẞ)AhxR*Ng-]Nc[˨ Sӵ?T:pPH~ i7zHH?i\KBsBGtFK#AxAKVMBɴ* s,8Ctn%B)d1(6^Z5vwaK(!$@^OT&¸UD9DP&5&24No 0TD]X>ωX>ωfbyIq@DF%ѳI+BbVIH>TP 9R=ksRf@/"x!g#HMkDbCUd{I){o)NYZjw%!;0Jjp XZ4zie#iew5T+ lZo*^-i{Z6ߺ|mղm= ]\:`푿ӷz֔*_|YrhFOy7 ajNYf?,,\W\k>sk?n+>s_߾CmL 9Eӳ7W0R@e96߾-1x! >QaNFs/ p 6hh$ pu6aLIJ: e`֝jJ {Qfu\ӯqm"~xzsۆ|?a=޼U{qۮ/-Ģ۾kH3 $ոlf n9Lj# )&=[tKa!W\鉶^F1Y\xb]iV2㛮儛@"\1 rѦR'd'gd>wS_w=@pcZK?];nq,REUQQEu a[H742Y^=ļTjh̿W*(#{[F 5/ErBIU%1MM#q.)7IٴVnd &{Lyw5$ڰ HG"FEy$"c`tUNEF|pScn {8QhFzъic %cYND(ɳOvȥn4ZS8 kJ]ճ8Fћ)@2S4N82:j anI[$)e%EPT 4(Y)#;Ѣ ]IEsfr1ڛu!o[Pq(1Y2n'Oם"3F@l s.;CQ|^$Bd&no+ak=l 44[>[i UEN1AܫSi YH=h+Yq"y[٪ݖҸMn,R놕Ksޔ,z qzጏ|4\͙0C)x,ݬdf%K7kg ǩP6&;a$ۢ1\|"[? , "ɯ_x%?.ΫIߍ^M~YtC@˫y×1bY{RBZ*B'5FxiJCx?g9ꔲO*[8"Ѥ} {hZ0$2f%1+YIdډWРwƃ :&z!eG <(q`2dEoΉ_i5R;h{b%HxjvCuȑTH \y]9/J;rpЬf% 4kڑ0CP2Z IO T? j_9QAͷf=*SƞH!BNe rd}v r .d{ fZ JC-ȶCz8mS\"yuP1c&fb,vHdpG7'V4j ه7'lgyuG[†yZ~zq]懀YaەKA+=jjG_@ FXþb&6o}Qp 9IZ,,CAlO|v53y̛Â҃zfH'?Kp\֐ĉ!ɦy ݈2 3|OQn^&m7]}o JV]==Vl0*d%G<ML@|7u[2Gm>6=d2u(Qv+b8d=xXt㍝)6 ;l/N#5IKOn{*JZAۅ\RGYEuEM[*l~r$Rk>y7{g}'X,^4}78PJ>;4 g/kvE.u (-q@08t F |{A5{? *3^p/εϡJΥfǩ'` WJYu-9TVr'K1o1REJ}Ld{8Yzm^E {< rY@.o]G$[EP A4X;ɑ;m,^};7STEfz؝fyQ4-;҆tF{rVboѾg)]G^R'X+X[\/bNnJ2)xJFv=Vآ-ZlR;t0 Cy !XEdžCqX)q?c5f~8KID=G`Iz?3 аcEWRá.ɥ*PB Rv߅Z9Iv0Ьf% 4kФdpiD[9QzPDl2*>X$V l4+5무.e'u\~K-˟Y5D'Doԛ?ĕ[kv%~ůz[ [Nf*vbM _rL"XO)ʤ]_w([K\! 2%c ~z42ІhFu:'_I ߜSdNmξ:':_rdcr_M8M"921ۺzRce-Nː- ʸ'76\-"flˈ"ZN+J*d4 h6 :F~F$}E8G24e@Ax&c(/gܖy;.Ȭ7 M:GoT!;T-ޖ0\>)g'v.?]]zd#x9)[49y|;;ȋc#X̼mfiK U(F{F(4`)D߇LBQT!EY։F)T|t6,N [I "6A#HjU8mE1p 6)T[7kn4VC}e MUf͡!:4[6Y[]#,NeIȱ:7U e -IO[g,6XMuÒcׁ  ́ȡNtut1>ǿpJ. nefA%zdy)ǀuJ4R>%O1q5[pǛʦ7 P wnǛCt!I^mo7{_!؈+m#I/1a@6vô!iq-Z*R*RUKn1c*VF* @NJɳ% 3ggK*^DuB-ZjJdN3M FX#`Zc#'LRz Ћvxq(4V6،02JL39 p=x<=7U.7V&!P[)Spa 7\ȬF1h ""  k N&g FF/"9RM"H)/LEGRIA{ SD%, )=7pM$*ZA`jERbnxM<R U㬛aPPT;:gڢa?:|so/W׼]]lTeRYٺGdωgkjzv?!9t)܀A<[SؚҲϙg,%Q%ȳ5 C;uh'1 GH@"Rnj<[gε&5C()0^Ԗ,_&W9 1(gԸtSϚxZ#5Ap71jP%ehT2^.\q}B7K*I$kW\M}29}.MicaX/|0Dc%y."uV}xZs}=4,sI&Q6}2NFڃQ(,tfiUpfng+,-&;a_i Hq0=e#sJFϾL^^ Q8ǓouPVX(҇MKgm=cMك4gUi6kAS!jbƋf)!XJEA 3ϋ/x> t8_cW+;Ŵsu[QH긏>{cG; Ɩi?M>I abJ XV{ z3"B@@$>tJ/d_iN:+XMQy>pR4_?Ȟ&Q;Akæ^y4jwg^_Y^􋦇% c?7-rɓs[;gZk%KܝYQp*"5A2pz;^fwEwiA [9j3htVT<"ejH+Zccq^)Jh%cpeҙ٨u)u`Mx%8ڶaj\8eP~,~ qfiw#ܕ3Ϟ:-&0BCsq$1_t~ALk2]oQ#}RR$kn[A{mg8ڊr%E%t`lLYI_:-3еX_z5na\nړ8ٟc ,iJ7w԰dqVT~>U5xZB|Zͳq4[wY@!,_qPX =n<|8zrh+n2@a$,Vנ~} /M#fޤtҥOj'5E<ȳg#ϒG=r7~#HRj 8:`y$e`:Xs?6KOEUx;G~u׆N|``$? ekm|{;7"ӕcjIv?H8:♉P\{.L@4F =HLF4@??#M(Pmy3J`< Tc:+^E"T[JOfJEN6lߒ:}( '7oB͇3pE nC e&r);倕D`=%eT Vx,al4:"`重 P͙zЃ~g̗Me"1Jľ}4NƨH0%›t8dpJ3 (Eg#t>Ĥ)$qvgDZЕ,)z%Kat 䝊k~ 䒒AC [uP-k `&Ƿd&`x{3 WU ch? 
F ߵ;(]|({QT_i37d#15޶61wnbgK}N#yrk͵ztPCcm>YnaOӿ߻ Ezq-ry&Sy~:R.36EHpQmaщy SN6q#җ\PxQ?^\%%[* ^$$QKRT58$"BJK<ht\gNrA(Cm͡L+2CU{vcu ͒$uIӭ?fwь&R#vGEMtH'z",z-%Yt.eI0 LDeNn cp;6W.UZ)KZ6l-*Y:bPE+Bӝg4կy_Ef7*e&<)L xauyM #_~T̞&̑b\7?.UP1$dAh:`+Æ@0%2tH!J }G/?@_7MMWʅ÷IU*ki-Ne&UѮ㦐clusB|VA#]9@M"-/ʂA{G-=shֈu?Zt8zɬ ]Ś\~je%pv8Kń~rnx:44Ybr9{fo/HRdMo͵jIV4$+[GB$9L5[3;)F SuT ,dv)IX;QI%RޚDZgZ0(z,i6fֲ[k5*#+ [{󘶔 #,2)7W= ?间*닉p>8%F9!t#y^D ul5Af]/sUGȟu}>"`ۏ>ƺ'KMK⣔q³% #~ط}} ˮy]mXT5qp@Β攵Y4=d%W[Ibyfo?<8kePKD"J5am.\"ͥ"'A78 u ifcJ/{]dn[D\΄p4cLPLg˜(s"xRBT WJP%;'YL"06q,\F0c)ediŐj_?A{%V76LWO=syS%Qz`} ~ FbK;Gpղbb~>T|aɬb20B阐C&-;ٯE$]6XWm|{T஥X3YۡgJ?%m7*˂Ƅw$}λZD:"kAu~ۗȆP\xP2|u jtYSz%Sz+ռX \V VV\"DZ]YWhP,pz6?AO` :pa0\aX4`'qƼr+f2&ibh:#Xu1b'"ZN Gc1WvaT\Q6hEC, " |66+9Kc5~[I,xHaE :b(j 9?2]Ƅz ?Uuj񣯽vhv]|HŽs{bR]fN{k^`hy4QK J3%ΑE\+BP#T1ᰴii22[ZDYJp y<7XIUQTK]ݷ̯!t|ItA;Ip<;N5GY'N(&+p-kTv>}Д<@-rJ" "\jJ[ FׁʨВ03PEXSsbɔRlj 6@*]Z/%U1i":P@(Aq(GD"0&Ɋ@,4\順~SU=+tʽ ?Y{U{V7J,llQ//XhL@t䲶 {o l-Abv"_񺬘"9ywPwsk7 |3ؤ K:SWj``Q˃sƂ:3.DcE=Hx0?E1ʏ mn˭ ӏ"/ud:yvXЊ_i2};vjA\Jakj* m5~TQ`ogu\E+ܶ%NPRZQ]5h6}gq5seJ.RW:H{A#hVAu§/$ 3-HM z(zbQ/My/%=p𗇇w_=|, w?f6XVZ4O9IUNNuUuJ9⫏Yyod5Yc֊! s:C,qvxn&Eo'D!`D+՞bR2ϪU $%t@ *XW0 VDh`D9j,+$$@Os<(ikN; r;:vqG,FA UTZ.qTrjG\ѣX-iDԈJZ%!k%Pb^!7AD*RW.=[(m3h0*m8k%7Lcß0؎HωEpyD:ԚQHpͻfry7/+ɅyxxC[s:-Ȯ 3<DVN dN|k+RIA>nl~cCbw%HT)Pשb,.a8-^ø :YG.Rږ84(ʜZbl1A t+`Áh,!9՝6Ќc5iTS+pOn}@DvW!4Z\@e΃ mwo4h'Ae7pͻM'sV;!2g@&N1>lQhuG9ljNU Y&eVB02J{sjrϱzxyoư=J<ލoP]ڭ n&/8o|3m{<\!hOGc (]L7 и&/#߈ 2Cv¹´D[nrIi/]Rz`czK!yĜw2DΥՒ&շ"ڬC$_r$V3U7 [uO\Nh9UδGI[e@û2*R3491"WK`'Gz'c&:5Nu띸Riwdnw,JpㄫkujBRdVu oCϞNᐢ1GtK?3`ErAu7ZI˨~C32[{hZ/^rGD )|hD&on; Қ&83Cce/՗%87izw$7|+ntc1ϷG81Rg6`$fP$xŰED[96BBWUƾ1UO{%+]Mzk%TG4ɖUb]0y96O&.&v^0(uP39*S ;uQLh%iA X$;=ҘNh&byٵt*ܨ>ߗriyWb>{s!?+Q9.niOh0_ cSS2z gb gn* ;j5}{DZ u;XJ B9,HK XP%>֐XYTalFu?YdoXռMH%Gr}Ws~Gf:<LmڠIr@2D ؁dm VG_2QA1CQ#t.鸾l#Xg;o;E*?F;3Xq%]\^8,sx 7_omnXrG2%7F;*!W\'5';ʟf󃕗b¦V7U{C=Buf 3ag log),;%Obl_Hnߦ'I\xǙ-깐QU˫zub êؿ:\X<~wYBy 8( {Ogy#{ 2{g9# qf,_O_N A?iGMQ!i+)y#&`lo};ː pBdAel1*]&:*hՔ؍A) >lMž]nn*xEP1_ ID 9c4e\0 PA2wi-y"|JKQ=x0| A)seA *sBrH)/e,_LJʮ_fe7|sD0F.Ky&F #w!F~[kknF.e/NԩM6SKR.^@[֑d&SAR 6gf׍FMIK =($tNXLq1 cbogi)G%P*_틊ӽDWe=ʩ(j*h,Qrygw;G@us>#j9ɾ(Cj8R5Oc(w(obUFe=UT`aʞz~âu_֫t nnu]!)~33oL{R-ta/Xr*BǺS|e7HX^9oB|$tBSY'JֲIE?̱[CՐ攗6ՙ$뮓fm${Cש j0z}܌sNL:84]PI٘'Q1VD~56'$1xQίb Z>M$R7F\Vy.Ղc @0Ihu0XB EE'JSS;^j[ ޒrU[:aпz߮Z]a _V?˜hFye{5^L!XC]Kßzn+ZG_"ц.d)MҌ1I)㘳8 rF*ЂK #/ߒ0Ǎ n̫Qgy8X@10ʵ,FZn6CXmd/fC)3,q'#@BTN !JF<[T}{ RGe.vRӁg eIBb˨[Gv_R4gy <:cf#E1L4ùylP"/l,jAc#hD;jDR3\wA'6fnafMvM>47zlxOZ֏Ӣ?#,6==-f+VJ]zO~76w'3W $Ls=.2}gR4]^\`"K%34etœ`̫~##Ny[^ Z.,J1UO 5yYlClh Sٔ|S4"<>dƏHqcSmo˅b9Mu!8.V3.V7P\/P6 ( ClB&pL0$XψTxwWWN5E:0ΌA80V"1W0U ^iP.K9M8"@kj>9jBK4-f"ut0ʊhX)q`BC$ `M"CQ4D lqC=AY˷`n[s^|a=X`H q;Fl0Dd$!$±JF,a46AK^^|]\mӱH;ϴJoCokog~I%*Ri{S׭wT31t=EN [")`9|O{q{n9{ն&c[8u<|N)0zҩ&-1̤6i':@%H|w{UbJ}`%/x}::xĖ i'|su7kxz!yelb̡o["Xr35#<737E:\8mfhf|u :\H;ߍwD")fO;SnFЪzOt0"z=B.;̿Q^MTӗ"ema f7yg۽kq-Q(㒪3[3A]nw-n THJ['cujHZ|Hy)9]o1bM@wc,YH.͗6\jBpi{\a Hm^GJ)<8 "ܓ64f\뺖ԍw:Z "ՉS5ĕDkrȍ%/Œ8F+@?\OHг;Rk9O2g{Q '+^⽢,ϯFa^"+5]yzLcu9-j~i= >ndo4}Ɔ6?EDC||*贔̷֒v ,B׺ުuTӗ4m_rh֓Wݚ|% y&lwL_[S \L3x^Zqԟwk~ѻa!oDTȻM#yޭ).u =Szn͏;z6,䍛M9(fl1|'Kxg֜MM`g9系g*mfAzw5=_L'E 6w_ LX㿔yZX%q?n *ߣ<% $% :P-fXLt8]i` V)$؏3' ǚn1<0+`Psڟff~#"$PB|CeDmMeK2gCs႗a$9r J/{-ނڲo})>2zu/{z1DEʑm _ o׭̅<]`=0ug}Xή{YޝUvW8JW#.2ŰnGawa\@\ݔy:oy\qbgW >1M?>Hؕ$jut*8Ĝ1HI ?iFA$%'/RIe+0 (iU]=bodC^Z! 
7 TwێN/dT01~Z$3HIbY2dB3 Bw8[7!wZ/R,N@_*L6v%v&weT!su3et_ٕ3#x"#73ڵrq3On8gWީ ї Y{.՚;ר1"Т=lz`[cCq=F`\RMbő[.$a<_|= WTy(خtvJJjWmdU;ۥ=ha͹jyXW FpCGM2m;:bg@g|Ї5-Iy :JKb|8RG1q&p>%3Fϰyt!{8.WH7fp^yztٞZ$\bh;Q[3zZgi]~,-3n+\fi5Ɯ X!E[U~WY*ٖ n/JE0[>)B')}zCM꾼*լ.\SqsH4QDn--R vnⴣz :Als[ )X9ĉaE0egR]lL)a/֎) cdj,|Y#rP}KiwFj cPެRFc `!*'tTTOJjƖ(60W56rmf2Id/p9~Ss^V.7vrcW.7vrS^ DɣO02 "2$BL 4LJF$c7d\._X*.^]z gૂ)-3fÏ6g>Ty22 iﯿ¾ۺ3}EW]|MGT *ic ]] Xr&\bº1Q xW45ge4cU].6D Jgvd4q˲!5sG.̌4ڌ-f^_f~`[ɳ3rne:[W`etˋ L_|Vw&w0S/f^჉6dټ?&~j,GϺ,>&s5ZP鳐= u e>$3v1'0xio˅b9MsL̨4!.gXkmHy/>]ҊTT 0"Yzoe.pïMQ kD2 h(A YD#ItB& ҆&@ D)R_k}ڸbo*V ӊ;Dө.7]xRقZ)dƒS9d=0gq][o7+dRdۻX`8lɀDi'9~=#5=3#+ {<&*֍uiZ3 xkrfd+Jff gWWIea5> 5\OE ?ϹSRߕ_vՌ\8 r*W MD9{`L\\7Q>)[Vp lɇ>s Q+{T66E91epxE'= >ykRZhc?fpYh"I >{4m>8fm[ץH`L܀mzJs |T^`Yo?b y),iqg"lZ)z[b"gx b@l!7~j`Hf4@{=ȒQ1jl֤:%YA2H%=Yh4aN06h!H'edkhG}DHiIHБ82$ dyt> QBrPĊgE|:[J[v2޶ۂV"}&>$Ӌ)9?E7Gw0/@{?S^C2Υ5{{Y 1@}.}gX<\v9Ι&41]_g/sg.₾_M8ɇ[`- gw﫟yҪj9Tl+وO3.ɸ $%UL Ag5K*Z %aйh#(  5$C$d\'"f &AtD:EQ|/:;X+#NgĞ ~;f2E!@,t䒈Izܛ@KW-eZGI?P'QJ )ĶJf-\V)sTHgyQGp6(î np\P9KΊd=1@^NӫrK^ҷg}?gTu\,"lLMlQM`s+qr y~yZxX&{St\?_u'L2)_zC1Z!7B㱔$v79?i}Mּ㹐ˎɳY,bTl[=^F@6R֕6mk ؋"c9 "Jq$n:ф+kZ{ҨwTRY>HPU s 븁h'f#Dɡ(,jc V2Sbj`dAxF-LsFqƙ`= 0cihnArLS:32?f*FF9%}q BL9@=Kpo Zzg|I۰hej]5şUueFm3٢3FjJpF;93h]Lm9o e1Jy"# ;g%+5Yv|B:%NY(dd&F:$ d.Yl&ÍLɻv C6Zts*y'J.af0_Ʌ]NY[Jta Z Wa$ǫE Y6H jРh,X av`5ʥ6J,/R?_śNEG-qX`]>/WRHHx' ?q$NYV9\ɚw'CS'l\e2TxTj}a%UY~xd!?o)Ml Ŗc ~iCطoW=S-Wl&Zh%߰R5nEW֢&b%-mJba)ci:Tkʖe]sd1madw巹\jɽ0 ;dA@ FDX!2Cpk։ QFpO$v@m.=/!jB_49p+qQyD3 JU1{_# a FU 0+L+{r=yZUK*{{=y;pOaNyL <lҙr9y缱*1LpIA.Zw'ڑUoW a{HF'\ -GN1G@ڂ?$[8mݪ^-vrvo'~.5غz+4Ld6'YbfF2>5︋>e+ [ŀHIYZ?wdKDf=K|.s7.BT~anx4Ldȟ_Ρ%΢H+X;sc4Zu~hl_az1TL%4ҪP,#'lrZSn7IuFwn慎hb=O0"6kjrUޏYխ*p3m4~ f]6ka#YW75bnŽAf|LS$B\JD1[&^cbM" 瑣J| -Ġ2q`",\3hQNñz—۞^[)fԯ^3CwG3acԵv\hFQXwϠ(2G JFFy)Pir]n[3;Pt+m'wIIC: )|Ȅz<$ٳq"0+b HgU&/CB-.&ɡȚڇ-8Nܻ5ƫƦK1֎*KOrv̀El(aV j&j |'LkQ1 6Y\ (܀X~L y@3)l>,si]Pk5'+{0R͎Mv=V-` 7PJR ϕ]yg f#FnGY*_-T5 GHN/x;M׳wpŒ0$#[šZ?Mvq'Gߟߞ]>_?͞׳ow~KA-e*n0Rkr=:W/)ޣ2(E ý; a--7:04|m-n\4K#26uM%q{7D}͠"To@F1X:PYrJ&f Ě[&(a;_/d~qϬJP{sէE~ϿxTDe +ǯ~uׯ_8ݛ3ʬfڒ{,=aDKU[\uI~)B="\1bǓ?nP%ãwNJZ5Tmh~'XosmJ7|շCxtP(0EG qîIgղU{Ȣa[̷&e)ar5I6_FOjoâ6ʊme0{/7-$8痧U@oU4/^;Msw*\^-G{9KG1s=Gb|-bEoo }W/M7хDV|߯c,GkviXDo˟4DzB>Y-#DJc4mrvoE{Q{`l/[_ jX;H8'Gn{IUJ!8g><ř{yYVoҭ/5]]!+УIMj+:pkX~;P /wsiR;T\~;Pa_ϼ񨸭zY2]jUǬn <zT3hEow#=LXIyjfZnB-3`B['ӟyºͣ2ZmctݪqV-WVm Er Y1s˜rXr)Ĉ>/~i=wN|]Yr>ZY~.~Ng>|iznnN/>7ߎLvHl8~+=3-QL?fD}d{:)ӻ8+Yhק=C[X& h7'e@9$uaS2Gj<)r _^ަPrUlz.4frwkX i[Ss rb|ښ$׫FFݰ=k?&ǛcSS}L$r ܜHH@'pByg?{ `O8[51ñfsU?a\Nw~e6nEK |g|!k[)HC^_NX4,mdC j<:<~#ҏKߋ v0f<@ӄL܁Ź)2!dT'ԲOg~ȑbeﰠ*`^n/tvOAٴ[-)vƋߗ-KVۊb'["" '%PH'"JYVu@@ͩHἪIznG(RJ tZ:OAl=,HJqQ8+J,"i*|-])˝~OeT΍QdɤZm!T/eA:BU 5X+Х/=RYz艿d}C-R՜Ed҃ .*iuV ܋*- , =~V|/Ye3MυxiYZo=ZɅt;d.E5݃!j,02i:^@ި=BAQc!@)q`Q}$y_W :(xݷKP$X=?4u>zp٩_#}菍l}²5gcD +pCHˊSYZ$s.Jpgf56LWqY IB,_ gN21W ݨLvb/*|*$}K=6$MBzs>ǡK/)?Sv%JBgŲ\\WVjW;_\O}GTz,u,[,[_P'_X%5yKOu*ku-H[\!,Gm 8g+|pgo{?k;tg Ews^rkqns}E6luLCN`!z :dɠ Ys,II-RKϽP.=B!AWdpeW6ϳщ:>Z+q^'~u0?׿}>Tm+&YGI!$~I~?cb ɴNOo~ Ԍ '&|35T}&ZĘ:嬨}j#>O@f_xNxlY,":z#Oز7Ucx3\D&~l[v׿Ws]zk% ˟( 86RK:+nC :e-1˛sG1`ٲ @GJ m0r"#Aɣ]Y`y?}Cx(ۘ흚z`Mg#0\ <[v|u@XnL.]~GDUvbbt'!О4g0cbhw2^1Mݒ *tC $,jO׳wuee}PƏ(KF&TU@m ^q[fn2~`QB"/ͭkOdCQ²*Z NrJ"wD  dp u1++"kPa]yBD.f{`-Rxoz΍HarQʊKeU,oG[oWr@ل;J+34$0n(d6kUvi{bUJ8VY2NƋ7<68I"5=üaxv 2r,'ste2 b?"DMk6yq3ĐܖzCS\*o9g,fbó¹/WW<`Rsq+/×"hO 2d/t)L^XVfl @ 4$'ʪ{22x uc2uH@ӠYc3J4f2tu-ly~>JӜ>%aNJpSq!d0A@V '2/sPj-? 
yS5q Kĕ%1 -`U/R9 %y^YcCulus޺DZ+1&&I+{}Hsz2јpMp#{N@P:M\5&DhO#fAbo، 2}mᇠP4Y'7ID ZL/NKYD۬Ե!Q@k&-٠~T?,\lPJqmajqEg& ėmjaɩ4?~Y5n@Ƣwe>0tXL&Nj.U(7TTkzC`:P Z{kVP}@+56$6E8et$rB;㎕ɤM&:7%|6t}V< ˭/_}="o?;kA@~|Mv \j9?$qv?_aگ&k|b5z|X7 T^'u\]ͧe\㹈݆V[R8M"oqϐ?'|ʲi}^cêuWO۬O3H\tĽ-d"҅g^x Y$ċ"x4}w^㉸ݥI;8DޤXa zϐzp²ݷl*ް`$gQVvډ*(+56Ds߬l4*jU׀ҕaN#0 dVV(+ad{e-;?hlpÁ' 3FFIL2u)9lJ9Kֈ&)ߗrYM քua./z{.B7:q mt@6ˉdIG#yQӱ9_/+8\HUUMI+3QrLV%NVYeuC.Lo-M݂oԫ)*"e.jmwisRpkQl+մZJp%x//0@/R@Cٵ?N)qiNꉚU~ ՕTۤi\ rŅԽĝ VTdP'qՁ+k<0tpZ4 Bx ~Ȏsu隦r|gOف*ƕ77Er߀# eҳ>~ /wX[3C@d[j.yG!Чg:&c ^E;6B &um0dL 5^.jV$S'@٨)lL( =NRމL:ۄ1Crh2qL%-dmt{M'$DKyROĶc$kcHR۴_*.^Lm~֋T[ ^7Fq1$ŭb(hGdȗ hj*yP*VxY/6EJ6$R(ZI:%[lOxTMNF4m&H,օG}jbtKU2828Dܨ7K-^,~7S̗ӴsUzWGבš臘R/HT*$__b z͜RJmUT͍is~I5TZ/ 5@!"mH&#>!v}5uy$ St{wKS| BSf(<xUxݰF/RSJ.NfQ\m {G`MbjG#K)`=YTN8aHRlI)[ (I#>$}w NaojE &u\/[{[ I# e3&ɴO4eD e[GgN_q8Rpe \HHñƻd`@!"͉ i PPq mycD0HLdTfL'9vPJ3pq=<`|-fBiK% :|pX+LjR J6%%g9 `l3cl4q"VӀ T`3̈%(*1fJ8k0b!aT)5rx);sVxD`Ȳ;Isa2'a$Pl6 0f#Va*=hhIHG R_=&yv|M5;_^8_O:>M.J)c2w.ܟ+Դ>Y( &Tt:yT^ KoPN TEVe @YSͅ*"UdjISJoDNZ#H8"E&Ѫqa0m6# U\SpmR4]m(Ndye+ZLT5rXd7W;< ,2,*.R)j^ RQξ,X^εHE`(5ix寕J,Ss;{??)+С)rSkooif0T&bU7Wb+PxoD;݋ە6F'NjUHN%JPl6O,HD2ktP7w7eRMj{Y*sK4[˭!~@ Pxz[n`pkw)i}n.&/?1 =u[ZEEqiM10%P0u0\8|t㏟_oe`R(=],dg muorߴ» y6r26lnpS>V? e~'4&5t}{ecz`m"qf^M4I3a4-s330u8}eߨ54Ӿl z硞߾o'/Lg VМIڻhJOm[y6@?o]'NvX5~ uI1}0L__AZ\CT[nШ|Io5룜GSAW- msy|z~Z{>:SVOA^8:pm{j)Z?JN=f_3փ^+^'޴bp'~q lenw-w؋o0C?$AH5 /۹WVp }>t׺hçG|ywS睝 GonwAҝvfߙHjgxwua\0u?u3/Ɯ9;遮>~8!;w7GG!~ւuF"pwCPjqt:߭4(;^~,OdvoVJl`B/Qyd%(>H`&IAiVpIRbgLa&VHB 拙*$X! VHBCKxLb" D j<? c@ҾyeIkZggp4ƃc [y\V*5n N(u[k-BO2IS2+[K Do&Svwˏ!wXsdh5"c헫a TGs^hLJ06>puYc286:6ذcf >6c 8Dj@3iSi y!9zOV@FiZ{M\ 12kSծUj@C,e=XI3@kΤ$fmp@˯҃:wfIϣLgH1EftDr`}YHۛNpgfh,#ԝ#::Lђ(\š퍅d'Q8nuiXS+`> 0 ÐC9BdjbY崩6*|?6ӦrTNi>MYnfb\dɸȒq%"6.cwA7p)Y)c$pn;@.hd% ;V P\6T7=W"|yo Me;x.җ9b̅45J7>| ي'SA,PAgU a31If#Mh,+, Ъ,q НH$j<Ȉcʨy*FDYo}j)C *8^.9LPMpB&9>pŕ1॒bRFI-fG/(so%D!.,hc*P1T{U*DuFrT;!AFs0{*jtҞYsǑKQl4˧7z'~qp7Uq4HX&dH8Pt$2̵)% JDȘ?)ekZ1(u*(87Q4T8Mw*sKER0fb&NMqdb+ c`0 Qod" DLI`H!Xo@aO)6 ˯1VAe(Cj@Ud@e^ȪqLщAӀ$XktR^bAcpgG@}\Ek&]i][6i3ːX!` [d ɓaR7"[6!QŠ4\RC[kS*r&Fg/y Cu1IQP!hd([GuK JEL7XpBV@iW=YA2qgURw`F^6ԗoHs`oT:S aLWjMΪ PF)v\9ŎsJ3e\h&ѪOj7j4vzTA=)iOertiv"$'\JuP $5)Tͪ8 !nE\:h6Z$˨H`pť-7|ŅÜ?UɆV6pΰ槕prM 0603NQDcE},Ȃv{A@M"2f)uQi#X߆R,UDFbE|:2} Uf,TjGEqR7qQf95<=.KSN^` a߷F͘OTi"\́[׫ꇹGZĐǎ`G$ڥ-bcD:PikX$`FP 7f{]Pq8=>eG:UOd秨ғ<ƍ]qW*|쌰- ;I5)0Oi{, Dx1iokP`2v[Sz*5ce<96Kh΁庱&L( =ٚf*[zVOCQ_Ju@{:xcǓeQ~Y& Ʃ6ZVeYLt(+,"njwS"dYLs˲Z nujU"yEUeYq@CUqHJh FRf#-:[ -*Za.[> ,ݖ $+cq:PVԁ8C J}9dlCH1)mΉ3 ,3@QYp{ʑhi[,Z4uSQ9(@}L9B׺F8#랎3%,hFJUSwpg>bS#0 _ic_ܙ0WqS]Ġ<y ́C ΤBg8_!x{D}dUf-o(ֶއ >%$2d[C=aK3yΩhDlfce%ra{:|l3GL<vn11GL#&77bbx/!}:j_2qTA;08a}\ywL㽽ڧВ181M,~9uꋗ_eu}PpTkc2gH_~XkE`눓e83B>-[?u\0~m`?VRuWz"i:P_,:G:T iXPE4uCcd"Yd ,e~U}^kR Ub95efk̢,z͢,z͉͢^tj^}F ͋0~k nE}?8@$r!hw$jk-a:)eF> ~H9@FTΣaأ`^ׁƊgmqAܰ9q=inChyO/ -)z78^=kV{ oukՅ|B@JYL|C!m67ZoN9@Q9aFr֫Ia?)@;XOcYꇤWӫ?zWzԫPRamm5ַZ)HJH5N:9J Pt}f2 Ez|Tȯ_/;5;tfGB S}$wa80Y1LIH,H!"[\d;Njd4ͺނT1ĕu$*&#s׺T^Zh!xG8!.~m ./j "D=$ODJ}XjSJD2{Jr pciXhqKj{3qHAYYSsѹd4pG[!z($Q&0ס1Z[CB7OwKz>D(n7L&үM;t[چPSͦRaNɃ$3HEކ WhP^KM^|xkmObyx%~HoMBw]yx%s褳 XgCj~}kIouҁNjj;Rtgmљw xz|m?P 6{>1xhG2:NB^囍Ne B8Eމ6гyFc6z P($}6dfyfG_џO5}}W؋FoOhɥЙȢA񝣲Tj߼PTR2(N@6_kzN3b>&r)Eq(%taH7s>7s7ϭN6l!*N.K$)dûضM! 
![(XoOWF8 .xzB9F7/BBv)_{K}+|IfdTi~luI|`BɾR7lb[/||(1.X-?)\ݿCq@Qc(7cJ$l l&Dal0c_QFrY>Y4.car c(^CF$o 3c}INPbֿmZMmߴǯJZ&j@\˥,2Js?L4 h8 lT}&: "߇`=:%Wt6V+(u$R&J' 64ÍƢi3{xN|kSHR>}>fT*,SHМ" }Uj9󀦠22q6QN$N;فFlIR:N(bfXj?:Ȥ97fJhѝIW PaϮ7UW1N-9l$|9:A*wH!/r(!Z3 c_fk[ytl_2C|yQg wLwڞǯJ8˵Qg'HGOԓoO^m47'{h[ e!SHG}g?lph9wB*;GǤ#?Z~{vbe^v8Ci{~ת ^d;СYt T3>gv]Ek&H%'IfL.S+y(W+,fJA <pL]Вr.)C\PEYfحg K2\j3|L^N¯@qNƙ,#"C_?<2q?ԿvVnYT8zMw4:Hg_Hb"}@<=hحL՜g,BWkcDeNM՚ףֿj%)Y}lh}>Ū=7mݴzFH)wvg3uc{Jj+3`'v `yIDUjGɎB"=t-$oIk: U jwb΋:0 Y.нW\n  .KS{eqʬE0%i%toKfN[+aX\Z;.-s!qInv ɠ_qQF#&kĥQ7Hfg\B*/K^Qgu[2Zv]qP‚YsqG-8㲆:[C3YER v]@V^Vmd FP_VGS *Xee*6)2dT_ C,\MO\G{1LNK@d3;Q8eBO$0]IM @Wz'fdpz`IhXRS_uQVb֝}:6rZ^=r-ʹ6V(nCJ:C$k&]V$O8i(8s^J7eCpbǴ޺h+9C$`{+ّP R5zFe~'AJmEq'LZ?)ŤS6•5n0'i25ƭJL4o.?"(u%O(2q|?*IPsR=Gd[tɱ ,(lbɭe\7mz%iЛO[3_N0tRKi}LȜV]NɈ!B!]ժG8Be@',Nc2&Cөy;.$J9%'MLdw?] צ4{*r 6! FK\(Ri&"[U SLC@pB \(D$Nh2iTU$ˌS\6W匓YD}U޼Y!R25^zƣ02 yA^zR(2YUj`d[Ƞ-ƫ'N86\z ] '8' BbI"R %iU[$NKR]8IB=!pN: QCqҩy;/J*X#&Q򇚤ť{1bq/+4m|Yu6A|Ƿ3"g<@&uQ (9̤mCaCΉ lb>Mv4l@e""!7o_A^zڥIP09k:y. #vӵE2fRۚ;Z uIᴻFX:7lzW!zj-qRiѫOpɾQ+g A6XpHD*lB_X nG, 8m(N*s>5iaoɿ"=V;77ؙ=`vf?\ 6N4qlǒ_j-~X p`ؑbXUNL*1~~"+ǐJT˙U(9$%eҌ'KɁ)EKFw>YySzo=pєZꩡݧ vaxdvs[]PtAU@ Z3܅}М ӌfWEKd,oȦ P0ΔB%]|eU nǘD $vEԼiǘ'5]]kPջ5;eDguY:#cfch I."AC)6R1mMB;ɭ-\RE5)^bm]{6- C _[6Sz=0IrJI7kJ0፬ϸ//rS& +1bmʋbh@4qՊ;]kּCo㢸":&FNX cRT% [L 4d7)c\Uu2 y;Bg7|~. HZ5\ԙ ;BǢ1 hjNPteܑhi9YT6YY5E#r,DkuvަEiס\r`>Nf] d JQ4>[S RQ _ DFTm%t VH\#[:Gl͖mX:>ᆅ6R/.I5ΠrLזx0n΍0%Ɔ/ܵ-נ8s";F["QM>cBlyަEY;[*l  ͪ}ragJ:pi#ڳ6U{TӠ+p&6Li_fCu{6-Ȍ.LU{2.W:\)h WJmJŨ Y1%dyJ7΀Kl1ƀ#)M@Roϔm\TfTH~厁scŏj}'zL_^,&Y~M>aNcMt[}<SapOlM(zwmh`QU`Ax +clN"\Pܕ*`Knp*KwA$^ Oo<:BGz:u)#55} m vˣi K 7NT }l§=fUfS}(EdSzj\ ׶4.M:gW%_f2&U0G(sݍ)W2 Vdc[MZu~d#q'c/hېXPBW/+B=l;,*~cRB< mjǼ ,3ȄK_/E8/yZJ98lBEVC~*`Ozd9%p0ץDIf6r4T̛,kh[WC. +vb<#Cc !UIK.w؄Ѱe C VUMCIUrvwg27vxoT+f/_rtCuly~IXiPBs˩E5 ^S͍WZ BӢ9!nn6, 4<=V^s+Ė 8/ɖ"|`JS/Ō _IQ6< <'Ѕn-$HFi؇kdlkEnӚ,)gۨ)spASRz*JSYWDֲIP%4oVfVRһ&Bcf[68'L3eفҰo*B;~キR[SzR)dԓe,|R](؂Zy -yPr2D)Jޖ*=)%56p#120H阈EdGmݍfi0J<|#LBπ7 9ęӴZNf4ZS=lW V M%i%۸(|p+Ғ3>7Umˌv `yeaL!yJX UrS\@>ǓqoǝqqR&3\| Go1Q5i1ԂMAZ= "TCf5ʹc gKoc/:2vunw0GcF:XӲ[NIruF:\IQ=ukmu.CuZ_n40h9\H'^0̳)r>p$XJqsI!vGCَ;zph`Bn۝ 3[MKnX iFjc9z4/seQ_ҔwSwPzn*n[|TS4y$VN$B&),/CI8_8P%f"O2r(j;C҆N&0`[S7k%g<=b6ƴ>1[B8ylϿІһBUh^;3 IW)(dby ܦLYN~L7|䖉󳴪%6tejeNS@/i\\k>~?Iuy]_~v91d@qy~(E05 ABePhO)\FbgDC J{X#og|j)9i d s-bº#9G9&ҳ);jMzu^Ʊ vԎ&)hLwt`kV4֎y[sSݯ[A!޿-w*l߰R^Ӧt~: ǢLE䄤䄤䄤.%+y.WTZM:6 ^.QʤMH M)j7:OKԿax3fgJikh&]7z3iY5(viq(RqO垓TtY37ߙVCoq}wN\S;]2hQ%%'SR4k+ V}LUR-{:1rM$׎y?RM9uX{z-RY>D֥RWZ~`D͘9Uw8'@־? PR-$%D@ݶh6)po6WpUslsޞcF= ɢaz[ۏJ ffM(z0v[ JUGп$AF96*ؓc 牞'o-|{Z=Ls ɠL4څ Oizf$wpX7kO ;IbG qP2A%VT  g4JU$cfݾJ6Y|6݉nUY)U}-peq؄2e{i 1BpfF;U^F;¸4Ri&p԰sGQ}(- C5O _:3KsKr_' fؠTEq, PeԿ80C@\V*o!Zɥ20ޫ 3gG Ic^_,~/2$69$/Cb4v` 3]Rh6&ꭹligh&gӏŅ_|,P]f\(C53?%8V> +c:,hBvDeuYu WUC{7qjٯ]]`365t2 X·҇D5:gb{6ŏ>(`#URsM?/m0Y< 6Ԉc3myGn>F41mlzLc{Fa3ʕ%|von.o:,u$fr%&z}_w'?xMt}rvb:ʻk=N;BmEgy7u׉?oo1!o__קm[uSLHQs[笂(m^-Vye yO50&`a1irei{qߎw3}7tzRį[uWj' v>Pb pċ=)3z r/6# c;3~@Fq _n[4Nk&f.4XaNt:C-ziFsn9x^q`A!9wLeedIz%d]mr6}yRH m *ȇA |] ~޲fC%S{ޯE͗ZAzqϳ]p̌܊ᆡF@ ܗΞvAmvsh;Zwy b;WAP{'N,o0R[wj0bƌ%;dQ%(c÷9ec#acmx!F¼ګƗUD FpL!n`O%͞NyxqAN|8ffϚ_Lڏ?Pco=ҕܫ>гF=2~y{`,+{QlƸvk<a73d7N& C$펫{|+]D*m[<}υrn8 3/k?l%pg.T"L)\4yF1m%wy!5z%Xj5(౅p^  !LC}? 
l8 KhDksTj6,q˅w;zn.[9`m#V&Xz7ӏwO ~?\|5wۿ4c.䇶/p8 y{3|U;ֻjzWX;;X!]RA@xf@EJ.O9v]9V]vfga>Hon?h}Bث-j a 2a5 HǨۅ@Ck \:kP$ /7$pl.4h޹\8~%y#%U @ɤ$2YA K7'V®,?~Z!2 \/V)GV Bd  =$"⟗Ar;; wL8]e렣nM3D6o59 fb,ۑ,yK ;k8XQWS"0[Tbo\L>QS.m[I\k PV0)0^ [2bdLN %U0brO:=vv]{쪻Ǻu0Њ*>F (CPi' H*x)@onCX '_3tAu0C?|*М&2y=dQ֠d}sa{I̔c4^" "G)yiRs'qB=T9\c83I{W>,6J+mjC!66`ǖYd-0l84(fbsEgGQ/5:(#N7>xL/ט/AfP_b"seo{k73![{mX^5}**,2 -yk)ު`8\v'W{nM`B(=:chێFL*l{V~:Jlg)B.ckzr&9ظ'Dt:3@^ ߀-A:_m%H9+|w[X!3WaB95K[UZS!Ֆ5D&bH{**Ǻ.,lcAjAr'ϥ"`Nai,'ႤwAFI:^[eo3c1an.Yz/eRɍ_us4'>qoŢY.m̓3Kv{e:fdO@ 5f4RrzIc«ӣ|Np౲d ԕUt3y}yu˲"ب{H?z9!)[SHN)`HQ>z IY)"op}e٣mJ^| $&jb'@jۖ5fOUɸ9 / l-Fe *[bAC@޷\ ao%@mT*`Z$뮴X}xa̪@bk]) zrT&r_﫶\J>Nj6Tcꔚc]3JSߺ? RZL85<%|J:%yIy2XOGXN-,/2x9a\cܰd4_F(͝K%,WoiM(䈲 5rl<F#:Cø A%[teܻvYoC1:Hv}u`.# 0z̍cD6;p8Lm[%CUφfr,DKPӢ@#̞F*q1Dmϔ .GLܯSٓTO<}k k}ڼˇァȩ)\ZYf4r^Q bc#RpO@;S<ȷ>) 5\'[q\0$Z?$۩ sV@xȰH9%(K7tc1b6Z6I + LơԒuM+Uc ΄}3 jJ2Fo j~zp ֙ M#jb9IՒV7(1S.z|c8~2RJy SЉ޶S6d'XdS97|A,%6yմVj+<C%%&^ $0f9c/܃Z!>;UMՃ\iEŐ)TzPܾzy tΈoto@et\nM:_ĈelO:%3Η ,m+v7婳%c(ٲ| rJ[PjDpUŹ S i(^`}uyh ]|FVwr(s~lzbS,邯dBmF\1}2׷l]/.:U#joz+L:s#r؀U-Wrx8ٲG4@_I0Qט7*WsۂxF% i}{V+]BmG:Dy6"cy.{Й{2x4㩑.6f>Oc:y{6"L]?T,{O@s]ea/"S8$"[r|uނ"n+ d& u%&G(5el{4[Ue'0)3EnA_f0b$^8hn͠ (W.d+"8co,mЊ!__5aͤA 1 i9wsC2*D,ޫxѐIr5x5~(7J. =9bYsk-YݶI5ޘJTAN4j+b5kb܋seύ֚YĵʪDV(,YCfc&;bw}߱7Y\ৄA`;Ö5C'S; {E1G!8(7:Ρ4r ~Mrd<}ypȘR65jy>p'm|EvŎy.t?Mp&v;s& :S9o8Lnd?5#$ga;ٲgey % &6SKR! k| !D*b { GsA@S]^8a*dD9ʜ;={JNՆλ7;3%&)x6tfQ/r ! YA,{j?).~~ssU<,7nT8ţ/w^\1ՌjFpˌYOjq2Ko_η&S Ox}O"P2Uu֪ @ECn׮P B(G{sSߘ*uG^GK W숖Zjͻ)mM, #?t)r;$ CJqH܅__\3ieL'(or ~I +p+5B68sz&Dyf0V)S9l!Հ&$حTET6Ʒ.c'\L3,fp)/?M|=Wq(}̈́L.%vgp^>=Mv:Ǿ bVKIS[ی6OUn\%?IAuBl6@oҋ&dh&ƚ)n|xC8yDʄ7kYmK 5KMkl) K#xic;T+{$Z*E[5uṰjX88Wƶ`΀štZi.4E, 1*nӢ|og*Ef|Eueh) p*d@65!B`عtnඎ{UIޕPZGI`)V'kZNsXm4%{X[FQ"ZG)+HPA(0PlP5(d,fzl:{d<ơKwOCV# (]vS. ڍQ-ҫh/.nj_! $T Ie;jb_rƨtC"K>ˑC(`<4mNtIc: n !3Ԗ}dw9[ |O56J)6He}Q5= p9&=+aWBC3"Ф}/ޅC$E|\hJۙ }!W?uՁ`DYEZ.Ir*Q+P˞]daQ+djdZ^K_ԱL iY/qXN .:S/=Df1W>:Hش҄ `l&+`sg[S`"j1E\&~LC]/I,<I3隷(xn%MZ F{I;J$n/f`}6,N/'l)VPB8K>5b y)fa˻l9oAo -IliZײղ M[sN@JؙXBWP껑PB+WȼIS$rij@q֓xVIƻs(MK'?P:Qq7eӝN~W~2Y W7/gS+AD캂tZ:5y["D.%? "mZv)WB*lgmݏk.Ҫ7BYwࡀ^ƒQ]k d3 Fjehy߷g̐\fakd=-C ;Rf^4hPcc'gplDo!zb\=cpҳ3̊*}A8ʔVԆ5νe9Wh@۲ ̨9ጞ2 PI*Y_"j¡4箨$-Hd-\VӜWbg7U M]m)D_mL AIsZO޹Ҧ>qVZXܶbyf\=sWr&)DGoչͤ.6i~-)!a- [庄zqp+ ?=*[.wNV範()o^6Y1[z.UgM˘`R[vRs"(Fal jm!~|KdCx(`Bڎ#P4Vj*K aҲIܘa0FD켻G9xr&gi=QrNqhN':q`c21JO=הCe=3El] ?Cy>3N'#gkɽ!xgad]@yz՘Z7/vӂX쑩!H'Ģ+_|I{l|Cs5Sd Ot\]3W_ V^"_o]ImxC7S{'`1 xxnTnɺžɈ.%w +c^6T +/m2bfKo mRf@UMդ1SC_s}Z{(r;po"(A2#pE3?%3OEGhO줾9(";;L?;2awOO.qFbaJ7=|[ C.PPptW H@[1h[RW 6Ia)TaE=3A9] ܮz[eݮ4 55ek3vHuVPĠ=/CFؕnc T{RI@>\ IPWRq"3FAe9fc(HhX{͕F]@u&M;k;̫g3Snq}SxuJhI; %R!#XSAQ&HYwO_e$m8 o/$eaљ3fx[TR 8ja FIoY7J}l9kzE6BVv ԪPaΈl*몮 Je!6DT6O-Xk߳m .{ #.*$ (KH+c((x=mD߳dT}q4}{ɰ; uOi>쌼T]{&n_$ʽ{z٩>8֬kx&qȜ4K_w,84g]O81q! Kh\I䆄p !&L]b3! 
xA ''!Hԫ$Eic>G[$(yAA ,P_hr3-};|>.h&qnsh85_=fgS|t{d g8ٟQ:'M"wn~W7|e<~O׍_Ng'8ۏƞxٿ_~rt<;bl8yYWp4ij39ϲgyb¯S/BkN}?@ƒ䑟lESŹwғl?U2Io&L_f)}\]Uۨ8-\}z}>Kh%cZ:8K\BDJt1 ^ !$Dlw3Arqx aYR~T]Mq Ƃ  cj6*2 Gc +&c ɐ)k@Mw M`I8&a/;ryoEV?6Ga}}۱~o~_G;k Cٻ6nlW㢷-B/ܠ&w ɕn,y$I!fC><$sxi`3>ڠ]1[45`n*L졭fu14.mHon7!׻rH%l'lI%?hgn؄?>Z׈]]W_te[״[s{ۃ!bp5ذQ F$&BB"b:܉(y/W3&MjUMTo:yukIi_>{r*z_JY JT%_Ia**^+T(=:YyUIɫd*+$ H$BkQk嵃YV~8y~l2kDZ&i\:_opzvjv )<`ڲq,~8(վe^zEOwq7ϧ2YOݗFo|$ wʴLZyz `Q˷0:dOn'qyw'ՠaVv>[xsz,0t})jS)K>i;7_#7|:S$!FVQ{n{QR]yZ& iNӼJ`a 8wz@^^c츜Bhಔ!r:"㸊)l4bߨTCc)6yhy0K@Puyp )qSF.I˭ȏk?u&|y@1+?Bv+]BGl0ˎj ܞfg6<~kw/?ѹoH=xdԙ C?{yxmZmΪAy1XNdc9;:f*Uxg';)󳟞o+`l-1ʼ}ThtY>BVmp}?uoThI$rmf 늓Ǘh24vCQ`աۨ6]YY'g' ^//8x?z/Tj[QqX  Jf1``3&XQיY= !q8nW7 ` ޞmFp\o_N!!l/OA3koY nKnKnKnKnKΝBm٭/ləlfcv)r%H{PRZv<: J>s[47C}L_h}?0nNƵli/-bg>[};hfO2b<n?rP 3(oy5o"K}B?NBؿGg`tl Rn0HC-5}ރ aCvK -͒k֥UTr"U^V"nx%/W^TB9H*Eץ;9k& 1dF}SJ'9z&S"QN/o+97ן]7yh 1o :HTڥQRJ'%I wktF@ d1bŹ SԔsߤ%[)&fADxC(l]H }k ^HQό3s]gDpUTgʛⵗBy*?_?JQe*-_JJ$KDqi1vx8Xd<02Y _V}N󥦜&-ywx ^E+#;2}{p0/qWhqvKڔ9F]  u J,XG_Ρ4 Υr&d(r)sp ߑ靿iGw$G7N JyBH%BQJ#NFFI`E _'V}$)$IK^'wƒW ~$MqO_bŠ)UblmiJL/ܽ8͐1fx ?ٻ{ ˇz-B>78iwRl ^4c6`yqpﷻ S| xiE [P;{}38OFrI 8=}췑[q=(z72"ȏhc8ۈ;I߽+ M=k`U#"72='p#5V=N[TBbB0axx c SW#uĔU/3|S&T0wmoP| p9>k.b(D:͇ FtIyS?) *KX *9^$Ol2* '.p1g'6x ӖGc4;@hq) g2A޸ K^S :B /$_s}&!p$۶r@:8>bdǥTa^haк6k{xR-8I!ᗎ-D*eB]?rx7Oa' [G#V GdsDQnLLmAmw'f<?6\vF7:G~' 4qN?^t[ P\;*mwwse1(bZTۺc9kĖ%NTiډ ]͹0ø<qIކ${ L55{z\4&qhEW7ɣy-!7n jesEn,G +ŵc]zxhAU3N ]1hG~g?rVMjpn590IoϢ4ɴ6J1EY;gw{{ҙJIgU#ڷG#Mtޛh'&=v5J(%~|jmضige];bԢ3T6ܢFƶޢw7$wŸ5ӚZG ޔх8.G?"#{s 0a&Ш1)+̭ٲ9ƽ[F Mmα1fi}+V;X=JS'z˫O?:;l$u+H=:PٱIJ`,ImJD'8 ցyIr& v&"h8@lPNRm0,7ӣH-bЯ@ב1 C oүLXSg#g 7D%X* ({PO"2I#O1: [=V-t) 0#crU( V)eG }B%h,6él8Ob [ s27D}.LTN"E:8ŤhI & eq;a69f2%L$IJ$ B׸]Xzm\ `".j-L5A fFLp vY ǢS0M((VuC70rqPI"&`CG$ p(:/J݈|[M1@R zZbFKgR'Z \6p_e1Uo>vw)Y!%KOPR ōbJImuQLŘ <|@(΂N'@O L'0ZPk:!ςq#L8j'ˁ#<,LܑC՛-U@h—5TcCdX(v4wׄɱQź3+`2J'qu¹s' QCkic:`v:b BcdB7.Ћ6Og*x5]qtoIyWBRmHn|WSΌ7|SFzgC0@'b٨.HT&ej`e[# s+.a"D"G>. Sg#:lkH˫L1D 0Y@'^<#+$=7 f@,6Z9t05 ) 9Uq^^;8Veږȍ0uTH D!idzo j(iIj3_ݼ4*VwKB'm׾S'oY\ d*mS7d|j8|?e-Ml m# ɬ {=k2r]&(d$J}ވE:PkpJf$QҮ]k0v#%UZLW"$xnKg.p՗HKD2Ƨ!4ۧ yXx!rc[Of̈/=}4JZݹяGƩf uOںK۵q.!$b8c%H>vw|7,r y?uUA0^Ue=_ 'gkAfvO~}ǶLP65~g6U3D ̞Į&O8:${Qc)=9v`w D ޔ&k"7/WU 0\lkf?ݬ[j$w3 /mL^/勭 ϮKQN(q\u ŭK%Z`f+*1~8;l 'ΤmU[IRnl>8=Y'a&/Np^>x)xVYyK*UHRM$EK$*KB_3%G? ޲=<ՖLD1GL9C_A+HSK'{;:;?8ǠO', g+Yα"I9QzRm R&'bu\%&vU.F}.@{u!Sz͡cm/DŦONkQPy-| BkwpL'/HIae "ɉE E9JLoV4r2$"d? P);fN!ܕ錽{Qwg\fPy?\}.ewJ ZL]}|^EGˏ-ĂLߟZ˰bKkO/NOJtw'lcN~ӓx2oh pD>9{G qYt}ϫ]AjXi zllhvpF؎\ Q@`kOWe?_t%ݡ5Eo r*|_嶜uP&9XESҵ*R^QеUU֌f&TCV:2( ϗVI s!NyɡLܺnhccKDu pT6-꼢 g7kLU \J؈H9:'xFGW}CXyTRJ,%~TSh JM5Ɖ9W8RUNޥ9ow:N?e$ēE8zrLnvv΋w_=hěHWc~p-[DY-Khc^ow&!m[T@oYFH2HZ fb4#q= 8<ǧ5[kԡ Xf )AlȘ.E=ʂ_ldΑ)ȳЭˆ!4w-ݡE`X/0ttC.=(0 2ƥplr t t&aSWFz!iCqHMx@<[ϣ;1{Dv±=q$pgZciLak( FQP}l솛t40w`Qi #/>5ވЅF1 gԇ78@c! j";LAlx(m'Z X֪hw$fIMM2DЂmpCpI AIDz X7\ >$;-ʻo^EwЀĝ8lO"^:-p @OámQWRV;IB3Y7doN8T ވv`QV.+A/Ǐy C\ٹ`:o5oh+J5 2l5; \AUkGdn;C̞?TeF5q{S=%v,ι:z \J[j-;cWMيpOhk^v^5GOͣ*Rtoԓ6Y'C%Xg,LﱽyLvWRYT[U`\$Zq$ 4)Jq ,S:qi.IT fÚ9O<ɮo2nsɵu9mPMEhkNL!|_Kgv}3֢$4r4~;L,7mF$dp;9@%0Ms`7L6m' ȀnY95DFZpKȖ;1 |GQ/~7}ǻ$r'h-Sz~W - TG.AtVPUJ/5]ԊvJ.֞%+U՜FA, Ku1G #3-hUUBhwWwI݇.eg%KY0 F*z3Wh]/pÚ?&(JQE#YӬ8X]LLk};[Rl'--F6r#Tb!so8*UJDg!+Πds! D-|ϘYvwu%7֧Q'+^$RD{u5 odi _mh{_/C\R"pva(ճ,' xwkY6aZ|R8rpp_Si8Y‚-ՇJޫT}H}稇Sr8}&>'kކ8 ~HԲJ_Mi[ #;[mh^ҢEC六ks+E5 2ȶ.C݇u|mlQFnΛ6?O6ITb&fVjbrTW!hXo#9z Iy;1Erj %Tl)bJ#atZbAz m,&䂎"Or)FkZ(ɶv)YC2-[[]{#1"G{]%/0 7URg8+8iF]kTqZ4ȇRJ1u ! 
jA[...(>'5_v}rgɨ.8q` To6 hCS-5nuE@idqO±6Ì 渄m}OIU cDPxcG3砇**uR_?C%CPEFn=V'_Ut)Q$% >ZͭEVk2E=G<с͊ۻӗe3?Jdgg5[w &]30ݷ]0eo&67XM w]>,&`kaHf5b]P21Gne;(N\0+X8؍3FfX1+6,7 yܣpmDhd `\Bx3<iyU@<iZU+6{7inA-.Oi9*ݼ m/𥘱]o~E,`֣-(LKuDه*`,{j8Mrlz Oh<%a\B=^ؖK)KBlCmu :*$Q2DCg <%=K2ITG`H8 *P]Lګ?ҡ^8Kyļ5>DLJxYk'os;KJW9sǔ@6-4 Z d8>0u{6i\yVCjf\A+78ޣz0QGB<|?6Fn=,Z>o2Y 09yRo.OY_r_㧓,rnOwM(If65^>cypxwo/o?_L&x2a6O1w*&p.%]% q3MIOU)}+'sZͿ0Tk\/:Q_|7>`;ag?o̞Fz,獣Ӡ{6`4Wpe+Z,yuL[{Kc7jӒw*9dmj]h\a뽓nhkr˘T,A>ǒ:НCr:/˷f}'$pju$ϑZ#~H:u*7TzwԨ;;:#m][_MWk~n܌z~ uO$;o@79>䟃M!ѸeuXM?˟?\|s%?#~ⵙ3-VE!{1kJoט6~`.sxa,}s`gWy?(1g?,wGz`*^7H!Dz_uk`$'o Z}Aىt̴cr?>g]kp3{BA欈^+|J9֋_ע˭ئ5mhW6=k _;on+knHc`ccq),b-NaH}Y@wpX<]Y_YfPrWWZM~n@ƅ ۸ ˴RpJʫWA8?ݏe<^a5ҜQ.u %Jx˴C W;J-.^t9AdF$kNdb P6Zt6p@d ;AF`BWvX$]pŚ1V."Yq@rzx9:,jXCAxINZl 3oEwQ}srs‹ZknHo͋;I"'{$ !Ee˧+\jq'.ͧSZ(de(Li DV6ؙ?$٬AR^^ʌ*e{&6%ģ[Ocʘ+ `mADXAaTN! @簜|%.^֏j  k` OTbp0RKA B(fBTG@nQqy4E_|2 5c<#aTYz`]KbB>+09{jw#vBu+Ύr?]tx3-8 ˥+wqj/;n:EFe[^trN1SCIF1~'S d_Ah:9/dLjv(?iL[0yh8Gv\9u&hsJG!$Jq蹒Z.a7oۋJu[{ÛITUlZ'FX!r^ a%PD 1 $JOSEZڔ0IJTENY'7އWՉM@B4e2c߁iŵWE\ߝ!Oa{ȿ;{mF~]?M#,T#aycH&$G#"pst1dA̛;6I!TvgМdAqQzp&tbz\:0 N7v:v=Ȼ![$ x7wRFt%ڷ'] t; " ^s G]E6Fx|Ql6\ =j/ aeQ0Zh*G!(C6oAA0;7׳|v0.y19\퍳=u$q4K pK xzNP4gQp,}Am_$࣢6ILQ:IK@ Z8E&g9eLFB>5#!S;]9]oSlx7swg 3Z<,Z+A#B #9!Ia@ۥ'zLJm$-bhJ dr ;L02w:Zj%nw=f?**e^]1]\s nл/8wW΋tTդ;1<=t'#R"dgѢ4w^?)DtyW&ubhר3 ז|.{wxޚȯԋa|(Aox. C]ԀPfT^sn~ @,OQVP)eءgcaTO=}~8z0Ƨ 8XőaO[XV+l,vaT.K =vkچYfcaQt[H>d.(Ÿjɋm:)]:E%=t_V5_vm3"|5}%DӮXsbf[èf <t\tfSN~4>Erdnd@daL3ƛG̟y8dZ񬬟=P6kg^6S|y3UIیӗ(_1ع} YOF tm " GhkAyW.TG8_L<|pvǜ,$^73OQn٤JtKwˁ`2M!B:KkEgLkX]~z񿿼?.;<ۻ퍾0 n$ѤStۭ3 5UYeUgiX7m}sR룜U@{.]CPDuCT\S.f 3-滼Y!tk67:,C4RIoʗ,׌qs&3ЙCs0kZ0ʛ0!ýλnTg= qE:֑-Yw~_GxGBrw^-9:~zѼ&z8c2vQĪmn;Oi6׆yNUʇq9\r#&9Ӄ䀌Sߎ 5Up$ Nz{h%?07~|yn2Ĉ0;&mCUc>mLqM[浼7Gnoqek j VQmmUӔ`Y Q)휁sXVb8+ j/(A5V4 8?o`4z1A3ƞY# . QlQ R` S[U[.u \ƨEJdLR(|Y,9yNƌQV:(NTJx"hm^HuR0nFr N#I;#GHcxL6c`"FsXF炁1ތI>Ų&u"D?erIGRXK)p , UF c7  B"Z 3+ ?݊\`i.$ k`7LJEҍثy{%h1/h q@n اI5EK` ?k\xPB-i^)92y{Uң X{зq8ѻj.Ἣ@A`/ o65~9dyW`RƈcQdBȸd!PzϽwYP]7w BQڒq9/=Y;II&q40Qiu+/D) qi1QJ}Y:B* nL+ p/WI6J*f|?/'3y3͎ ݏo~UhB(X+E$a;}\>uEI9ea{wnWg };&.ۏpԥ" HYRw1 G؄y\u(p?><_a' ^~•/hY k 6l* "_)E@Ab.9 `XL 2L1,Tk+O:'BQs_P2<#  NWɦDS~w24Kun)8 ; URyB#VCFcA&%XPͱ϶t^u맬= oFE00}?|I_H Rrt%8F.e&ER[!%;;3;;;;;6sq6YYֿVS及fc`<\!*G/NK8 OM[[{zUKWioZ9 sH~;YtKnY8| 9]{5_C;Z^[ Ll&lXT<Bv4MsT( ppx;5=!-=~9_w2Ac >KN. 
bM;@/zs{cP0S;wo_||}= aA5\ ^9 pᝬ-hkؿpϦ`.f|Za Vsx =N>i.\;|90}x=_}O{3Zwty̺B$]!E7]qtP`@Mo0vK旅0O:?` v՟ws7]%&[O弫cYC6kF#88 GӾ=r}w&?9|3Nĺy)#htWO};lہ߿o0xBLw-^f-s]k׹eSS{_oZȩc~ֵs~ Yn9]GJwEvSwZK[Ǘ7ۺf,l{lKTa#&tr6"gɒGƥg[Yl3v3yjN?A\Pӻ?]r`MI>_ lmGsw~"⏷檹@uE< o'y9?J!d£hSm#RM\eB8ǣFT'M!$dy|hWٞ7 'Ȟjjzh2 q(CX7\[f" .C'{t*;*ĉt[HvNxQ={ͥ!KjpE׻OPVK3!Ɋ, c^aD.\-r;(ɰ9AobF-m:T.ݾeڛotZQ:n[n %O eLYeD`KcȘ*e1U"rdVd|}DgF4A7].R!6 * EO7b5P'vNS![j%w'+^J{xÏ8fmx K.y4 Sq9ީՉyEXl"'{m Ь4Q8 oCWK0uw7gsyYj`af Vd5X~ƻ_ZyŬw H \XU^(8?z]@*7\X.<Rz5M lK\vQ[ɋ)?9Sp`lrBsaff40(3Ѥʭe/@T@z;˃xnϳ}S_8>?xǽ -ܫ'=8sZu%| hkHAE(7E +b!N"H4.t"dȚOqa|*/GHD%<"FvUy4kDPs(rvZp/w$ : yvǶdA~,4;`Lb*(Vۀ:Zۚ7T\c\_O_og М[[flK|Zdy"bYɕ?R`g d bQ8ae8lY[Hw@ &}bnQ"Ӧ@;D&l#MO"GYFrO&Z@gIu?i6NS8Yi)Gl;#βjA hCOHR+t{cy^ڌub.U4]/S9B!ZR9J0%*і}A5R 5۴6u0s'g`|nƶպնv 췹-{= \nPݪ^߂:ƥFm5wn65DeS]dA"DJ]m-ު21Nt`,0,A(bWR"XIDqdH"∳$H8uȸu`pWp` ]-X} a]o0tl((:ڀa&Hcp˵AD*O"|IH`ILbUQHlbJ%n*b-J2ʸNIsߏ:Na\Z0 S%Ǡ<+>'Jjk9`W$'U~e+D i/r:"NbEXeI ˊ~,YA4'.ޘ#H =#&(i"@$X&fqB5P$cAHYek,hLim9soS3seV{wYI6$C0l&X0x.jii aqa`'Z(G4-h4HJӰ7p]ʠ1c u2\V;#﯂$uUpqA X~tZ5:{@klDB{'mP3)WLRY㏬H:sţ+}FZ@] vn'7i/v*fc cc=1FX팓9@-8M6 2D{E"1_zTw Ћ әQ-Op0k0D(@Ee ]ah伵Dxڟduw:MႫ1=r1õs<K{^WU0,i+.R z i*W{ Q.wU#)-{`_z0KQ 5NtKݾD.mص+e4"bl6H H$H[Y#1PAEp8/(`…E8`Q>w_p$q~ ѸZ/<^۴gŵwasi ʬ;EC)c0nH$`$a]b-\qVS t88zgA3`:K_Nsnx bTΊ{aߧt;zSN;ѰN[rSs9'(EdMƼO Xd`ȠdZOݦ$u04o¼7~AD 2r"_pe8;x ͑l ֬ƭ~h+ S@X.v pEU-lONc 3ʤ]*A;{G@@բT]g.nI ?s*i O`@p%Q$Ba8,E\kw؁GbJ?`]ŀ7 ކ>$们ܳ$v`m4?mGTT"ͬ)> fCO@!:˒1,csTaEuUEc7@=И :F)lWRX6{QaF\QU @[)O7ʱ$$Jtթח/4 \1 Bad Dk4Y7 y^S+T.ĉQ\&*cX!QAϟu-1~B%% U`&"(PylX̑gLk^JEڿk&էI r/V2"IK (ɂk\>Vl99s#|y^-I~\'\Ǔ]ܛWk/.KWP l'⍕= @ E~j*djw+]DZW~ T" X2$D_#yɕIZIjk3=3W$kٜUu]]]L q1k~g }sc٦Һ;kN/J4ѥT斚)1x) -)\4TQڑ%rBA[&pj &)"]ɮÙ"bvI:Iz9vyd3$sb L^^‘% rCs^(S .u?7e^t>}H)ߛfa~|GM-_,)xW֌s2^ tT+'D)N,~2EH8gP.kJ47gSTD'vSVP:8qx(,C!;ND"bE ߙi'j!ͯw>g1kFvAo<jEן^%~z $Ԍp6^%%a6Ď]`b$qdWD`s}0j߾r;6=[I*)A@h 򛻹_ J\ֻ` 6A_:Z V*.{e|0m ]ⶈHР`!lՅ8ED+άrSJh`;^='ڂ ĒPJ q:_es J]&f ]`WGKz?_q}WhK7,p~ / |q{Sb CbaQ, 1*X?7G(0A)/N:[6Ļa*$`RpcJ1?|\rպ k1^60},]>W'2 [oj|ggQԓQA !Nv|dQ h[(h sRrnIӬ\j_Fnk^ߜ1X=2y=iZɌ-!C ",nȻX,g RVwQJgO`ɗ7t 4/kݡȡFBq ;e-C@wm?[>ID`E>!*bnQ\ءep {_"I+Q>x=T ly飘>yr^zYhcY]' -`$b^3pv; -(qyr$+e)ܗg.08?lbqQ,6FheAioAMwaZ0Jh08|8-mQpt}Q侼 3Mga./D❨Q5wF;QefRe?.Ƣ`E` D v /bܗfa.9,hkVL>VJrʴPpGK=bU"\E`֘Ń$VT$\Ŏg w#[. ɬ,V}y}VVH3~&Ue-6[0c-8A{*؃ }'bp7%C(gVQxd"3KoXq S%)IDR`b%4cO\G;7ܨAT=zQ ʪ' Կ%:$*;%0S /~PO\u eb-/!~{J:H1~SnDrfVW!C;əqcG>†n@1Ga_Nj}r (d3Qly:uZ `&^\? 9u6)^Rxj`s40! FMrkN0)س2}DN i, NB8lhP01Y@rQ#Z ,!C v6H.:ܻc 1 -F0F,ADaǞ<|f$OfBKX ~FA*ȾLi/`4gD-V˂1! 0L2#x(1})b-'v;-b%Ѥ|ǽ]%8qc7[J0diFQґ`Ic gzl-" ʝf3A 0dL%eTyG71yEa.Y*4`ZDEN"DJqZlz$v(i5% =yjVӲש&{]-s0sww5QHDH}jI%^Xy:zrfq T]E3Cr`0@UC8zd|@Rў%pD|0 x6ri8Hx"x\`cmX"۰!ofw"u01f#׬xP)tA l;nL_ u,⣻;~1*( C0R+Hf3r|Ӹ^4i3()UcoZ~2-۬xJPӢIʰftfXG&C$lsPy=xur:uHhA:++XR'GޱPR*ANh՞k7JBx# ZaIU 5vbeb% >ZkwMŪ@BE/pkFwNho:E5Â6G3b);uK>Uڏuo0a!bJ87~}['f+u+BAwBnғ;ቫCP︕("mGloMoVSљ3*&>K\3`LXb '/hDEk.eZ}UH(ݑL´ k& hjQwQ:#9*91O@%&4ngS]+ѝK螢17_нSo^ SSUpJwu6Z[D3BR;,"$Z']TK0|GPl(CJ# [㬱ޑut̴zU _ RD ?0N"2O^; !٫YRLI.˅/VR*Ķ٫Y"PPYV!h` \X32"3<0:>V#F"lWp1hNRE4,aԵJ``Qv:XI8w\M1*wa\`$e 80)nrAh - 5KZ  `F[A% `T>Ile\' ra~.33xqJ8r[WW{qI Q+l|qwJm28`3J'isNr&}zkW. 
iݝ8ɧ=ѝ4:Cؓ 4:U,C*=H8$V4O^XE,={)in>Nrng}] )H:%ӆg%)%&:%ӆKwOMGqɐsʻKtȓ$ɑw\8-*]8.>I,-y*8E-\9NVi& T2_,SNm W̫X8ƴn?7&EQCjHlQ!i"*w%?'F,Vsa(1-Bp 8INNhIo'2 !c:~kBOmNY(n~</I=,Lek^)Ax"[Pu\P(MdGܽJudxɥL(m?O~vbzKQ5__>exYVEjaa|cΈlG_=d$x?ߕ?݂OBcI/Qü(| x[ژ% g!D ;DQ(=>Lt7O?+'y k],~Ʈ_O) mj$,q>Kx!R;k(S5zͼx4 ozlt ˹rX9\&E l]&ڠQ53VO[$ HHm+P}F5'ˡSPI1yEg!ۓM,'U>I#朂ԜVڡ8O:W?s@J7QGoE~Ơ:>Xu*x#&6Պ8x+fn/EDy(= x^ȘDo.yYxe_K1łFF]9Ά $mKg]Udqv-o]BѶdpvIcn8{e!vPy컯lѼ -*@vdpwaOd ckIִD~v9Ֆ H"dO$&< $tT>~q֝j*9H]:<ߎI*FTHd+3[:u.T:/ ʧK\2oq_*e^d^۽. .0]^=gy.;CZ<8~lF`&:wlܭqEC! W  '"FACdCuYLy$I$5ќt~wNu(K᝺ : 1p')M[Ifgrg )qX~@g-^Eu MjۙKz ,݇(+S ]ɢjCXGw-?ѝR=iYw*%,(H,#\KLf4Œ 鎐<: B_8W X$s^LN*FX!sE;Fkc_[NPt &~-*-R9uT轧?Hs͛sVTpNj?+\`% il}/ A1cdbEn-m&6!XD" MG`C`vTi*'M?>yF.X&&CV$% 1UJ(,mA-5/3X;I.#jy6 _j؛f R.O߬AXMKZ5ַ jJ_+TRTE]41W~ܟar*m 0kcx)l2[yp +^aZCNRi M:IXwerk,Ck'i-%{7O? 6/m627ӉM_Ʉ3dO䢓CPchJ5CsX{ep-&/ccxC 'pm0Lq24:F2 [8a#0iobh'KvlG/&`.{J ñߴS^q%Dt|:h)'׃N̬QYpf'm^w* E. 4<$9M RI}ǍS)kgXD4\3KgfoFCiicT jb\~ZJV[wrU<)#dph0IR_2#`[ #H2 e !Z-isgd)0 ]GDu1.mr4\-7 W` 1͜ʻjSbeRF0jsbBYӬ*ïKul̜㝒ّYiX#U2OdL.1&HjّY r @!+޻ww6Q`fB.pc>Aޔz!s泚L7̬TYcIhKglSlFN[0Ytc&pN_^jN²Bey@ F@8u !³vEdL̨cEn0%@@]B:peK c6T^I `|i@A6ruYa)[A@$~lՅf}.҄Ʊ$<"Mdd@` 3IA iKk$fП%ƂHNV5%gKSOfęo@󩯓d\vv(sG#BLg's)NA>@M!}FsBzƮihlpImunoʤWja.0ߟ}LYr幓S+CsEծW[[t0#jP)\֊FqpJFv?RAaNt>@3q۽ư^Eͫl{eD,P|0@<]`rXhR(n NL{ s:@+8'59XjlD \?(`4r[+F#}E=Z'w`8qh 75`nX6|, b-5L!Yxp>]p8=b8bY220qsڱ~le~88rO>kILaPHR &ñ\\%gI{jW.bH`eL-C /R\bjx^2_ų_t.?4~?ð}w/y'do|zn^xv͛o^={yM>~׮n~nݛo#^ku} {O_8}̼.^Ah,&O_O@m]͛?O00&>`ܧש-3A֙ .gY6:5.UkoVCa+>Ot`˥3|Q8`l )MZ41Ňi$Ԙ%'eDgm`IיR*uoObn91rgs9CzSl'4X|fZLFC9+HaŰh!jǚ+#g?$\2m.uV-OЊź#\!G#Y_aB"$ : +FV@!z,?7>xhH͍TF ,\(#]TmKw-weqHteSCzXDXcP(-^n裺u`KF"Df"){˹h9&+sv'ssBxRE"JTeC.p N+cX9!p>\P^QF&jԔM<*2ؑiAh&4͈: 83\̩N5 )>maBZ:X%X/^.+[>|PU]1p1(}>bFՖ7k;/;X,ܱMNJ]3ùۥ~<& y,Ǵ.IsB1 }}+|/ЏcN$.(BD̝Zy]' uLaqjrP &uYm,5}Б 4sy}GK[qvΆaRe*ר~{5T q5]#suDJM Hsj}Z@f\}JnYHL[a 28$5VH:^ӜFY.:TTmuEy7¼ւ:G(w KM],=2 s#c|ݭ֞893~`G,X`MW ֟é2~qMu 6vb,n[j"|+[[b}2K/ ^-nc.ۣO7~2Z|:;+/jeX?Ҹk7m2ATt! ZNI!sXj'f(C@eYJRNӈ- u bj$Ȱ}TX8{.H-[)m#ٷDw]6hG&|z;ZC/^OtOt֙"W׼Y0GN&~]4 TuG*'5,r" y"%SA!gWOwvAi[,ʈNXJ4E]uSҺU!!/\Dd*xtՐf׺ ѦɃRcGnYU[ӻ[E4F$wA-eD'vc[9Ds[FZ*$䅋hLaQr4j{"쨖W leEH U 1ؿTn8 M`D/N!NJ5A dC0 UjƜ 0 CP&P*'0q'k$=Is=j{N!{{p3 yZZW/969UdjY+D8DYВӦtˆkn342~ RC}$DGx@|Z9T.2ю\fS?/.ϟw#y9XGG~~ _zKlx"^kg1Rp:CA+pCY,$9p8șHy8-w?^3ߜ9?a~KNt*"JR?-*9@AhA%ڠo"bD0?\ި[sSB XgBԵ̉rV*= p&/oƷʷ;Y E0)ҧ卽'w_E7?F qSGI XI1NMq†$ I+ bgSwWQ35N9aZRpنTR(7Qwk?7j2Vn{$$E$@nh.P̛Mǩ5*NdkPN@HWFXPa0}7b)ك3vՌA轵Ms7Ӣ/ÚvPHu-oB w씟c(gp:(fL:ƧVkF#<?.3i?M={CE,fQb4Dt7s}OR8"vI[E.hvC;֡[H';1I>E<(Y飉"֬ ٝN цn,ʸs3*O?L6Xȝb1pko[ X7>ґo^=ɫ~\ϑ{\towPtKCkd}mk+_/cfܦ,K9g]a"Nܗ2aynΓw~MrX9 E2+G49XМ*a[!7aMJiޑ#C #&AEqG~NJIj%EJMJq/xZ%Ur=EO@ж䀮4GwܛGU蕂o{f_Q[h+h󻌢pjF= ^dTBQR9# u&Ycu7_4k2콖Z)=8^DdY25}qsAy)\v?0NaWm)XN;4`vtn}H cF(%Rө$@E$F !75RT?QU(.* &܅ʦ'@!aK$gI3 Q,^Xb4&l &$ 9c}JY|Hf' +.Bԡ5w?Xx-8k)?pQ">eSwpͅCpDCN CRκh+M~x N2 2d PR<̒q/cd͍IkV)rB%<9wksdd2V( F PA4#t&yak)y߳/?E($F2t]>St:Cqt.U#Fe՛[ 5;\CVё|kRO]]waE٧SMsQPlTe=ԁ10$LiBR-Iܑl3eP?7K?XC6A#8h/6G#reY Q!p6Zh'E -F.8=_1Ʌ(0QCG`\!nzrK?62jAETHG%d}$Bc Q"Qu6У er#RKp}_pN !Dd8K,JJ5OJ,CuJl#ӄF 29k iʉK.-{e0+2}iA7g krbD!ƺS\k{n]`AYAdsnUByy>qCF5岩"!}z DH/(WM-c^z:Pkf@D DT"93 2K-*Yc]ѵrX3<|wA+|Mܯ %+ky8JocOS!^.G68tW@ ]DHʵI?AXƞ PiJ\tP_QֳUBr4QNJ2BJF8e@0sP(։JZ$9kl{ EWxXQ_f({-]qCz$م!^ ~j%v}A$#v"6۶٪<;^rxN1t"BO'jipr^rK\+,ΩQFb:Zz68w8E׼F9GI*H$3JCJ2 MrXlEBfe{էܘc`nJa&_dHX 2\snG;?$ dѷʉL94zR3s5v6{/ ?^]h;I,;z7M/}S˰)%9 ?{۶ze! 
rA^Q/&B-'IsA%%%{)Rk4$g3; j3ĵJPJ, b<̰p؟79=9L@3wߩ^̮5  ۀއK?f;_廋R^[Hq[lseHEgHR _#*W4a ?ZO+9Nd\˶9io_<]oAKqG/9:>>#>[x:o [W39L)!q,r܇ ,xcR1xT)2G_vǎNn۾\qHi '(oA6NAOCTEkea   5.reVP > 0ΔFJkAJ:SHrcP ,b,O?s`}C%uyLJ`Wj~y5X9>4: gy=yr16H᱄ fŒú/}%"($̽Si։4"@BYL'#:}[ө&&yN:R-Ƣ⇲Ău7SRyGE Nb,<# _V$[/e1|g +O屢2vM~xQ^c0ߏ|̗ED9(,\ [2. 1>( -_Q*u_TK|gQV @^b5J:%Rg kti!4K G[ !VU<6 \t1<2m8X nf[̭qUԁR3!ysPnsWK٥#8TZm%xRρ{}=AH&"WxBݝ`ԉ".C%4Gu$Io;vI"q ]fyZOXItOh24X}(QI%en4m!1* BXnT8PƐPFeqnP >Ax/ 1FVbu:΀xbp۶{USSڳމ00AV0N|f0VBw?R!ErMQ{-{ͪy QT|6su<)Bt+DLd`BUuƘ_QSɭ+ɺI:HZ^dƽki0=ĮC@Ed F*.O,[le*d _"[(1=Ӡ!JqoEc=(P Q퓬T&eDP$h1Az^p!r#8 F9ճT ]Kj&ȓ)3 9J`j83e/hn9kZHp 8 \x'`4#;`#lF =▲ qbaqi2[Ӟ[@[v_Op/lSdWLJ205p)>#!şrtӎW#ur$8gM,HJLriCĻ iJ䨒϶E5(_·M'-|=:ICخX-* \k&K %C'KxF&t ќB,H_ L!I))DC)9 7=Q Wk`Tvvn# #RжQ 9 &3d\W% iFuVukWPl׭A+AZK:tJ%U37v e]"P&\Ұ&8%QoG:`B2`q)˯2fd.(,(rG (!R41a>\O/u9Qb Om &Ig\SR6в "bS屒ԼXф )^:i2q6`Rz;X9F5g~C$[qRNJ;-V6iMj|Ц|" wY]0aPY´Us&5>hjA4l֭d%*MsT6'qbtPuY3r+Y0pᕙ~ dd^!!!5+$!!9 M9Th-p M.'>0|⬘ FMok5{WsB>QUS0UXvh᭘>Xw5:yk,]+¦5t(o@}UBGY2@-!ε{T+$W֭o&JL3f;N$sh$f2EFjZU7^] Yַ*u)wnN(Z>% -_ZPLqZذWpmV NZOpkHqFz(""-|쎳Çh –˖>ܩx{Es}Ŕ~(19nqx\Oy5 @hx \:@}<էKuOdWj+B5A ChF@aq2f}d?yu?/.޾yL~~9f7^$Lo~vi{Q˟,xf^eSA(0sҗNomfʼ3Q9^^CX@iwN\F)l;ӵqMSh SgUtv9y ${pvOo39q: `Hf<\mͲ5r~ep `,O6LQx>nu4 +څWˁ{8R4DOgn}3ser0Sgޯ?NjgCo^M̼ `tUgZzt ;^‹~^dzs,@?}@ Ryzy9|ԙG=]Ν0?N@Aȯ]6d8rfLer`߾e fr? L+8;8"_xO22w(k<_5owd9κfd2/ط6jlx u5]R.:8.irAmbEA*F59fޔ2/VAK.fv GJ^~ox\?IJ{#"9ᇸ%dJʍKsioAWw"SI7wUR` LXpKY8E0hTBڽ9N!׏U gdѭYfw1&r\ػ8n$W|؝8Pfv&3kp6/[qLoI`ζPYJ|HyWz嬷.(z]xéR_4bƽk9-FZ7()tPP.tRR$+k(JZ> `+FC\J-U R*z)C 2,8iBмԑJR5̈S̈"\̔UQ(FM -/-uFzDƌ6ij0kN 0g LaE5£ԌUѐZx(ҿY-10Ε$;` dE N,xE Z.䋑Uv.mɆ0Z ޸wM:͸=ItF}ZrpI_>_֑,>R" ulY:ʨӳ/7p!R*Dw'F$LwϮR90'ip߿9?+gwZGT>sg. Y|pKaEf>7SZ>\ Fըhɘi0z@i'uVIi%pEY40$0ޒ}4<:jKDy mCDfdÆ|ӷNp?"5S,eeq G=ԣ&B *ZYr kjU ne{(LRɔ鸞rmtNfvNNR\E•%m &iYVa *HڃP$:w\rt|3[}L:?onhV$L8pn"O  8島io#[ꂓ !XSKh+-mbe j6U܍.3F&Rvۜj%0ۜ9?ç A1%aj#b*4m5}1G@.#medh4@*Tl' Nz#4*zS̱LI((8n@`-)WH-5J"HA;EFʎa(>6$ڸBN+>LJI"Uvti5 0MڸXm.Ef 34KiL2Z p9#`s^#eh )3uٲW\VmgҒ(<[m\p{,h2f=](VK#̂=soV.΋ْUFU__'}ZzWe|)#8j =CDE|M[:=}qQNɊ |&.ί\1I5^͓jR:o4|H|:+;(gAQV>(:aMRW9xq ԕU»[fyӌqD ڡtUNW[+m?3 !QueEG˘)QCvh| t,5]ΫA;.ѪaW_?djK kf.t75)hRWn~2&#aKF;Tnt㾍fC*w{5=-jd-α,M7Vm\d])]|eS+ j$O{+VVV={~Jg],/_m~:{ $t 3/|f:vUƥɣΓwLKJ#Gž; Sỷ|k )@YjgǻY,oqwsIf |./>?[;AjCINBH_LZ/Z29/48kE2V(}'OE/]\)l9#j]R3#PuVH? bh;N(deiՅ &.$qJ?;/ }@xxY 鈇%xVL{rii.*,Lh ⍌f}t#.y9ڏ-랗RvezUG1a]0 nP[2/#.7_tLinH)DQxdt燼4ZʭA.}KqrJ/MO3;qq%1^~~H#CPfoc_޸1H Nj5ֈAIP))EU#eazN:<ߝ$@n{x3 &"I8`Aejf4w4*Mwiqc8|)iźʘԺhiuQ.)$zjUY 2;8|;F AU:*^U usxhW-FW_oީ%sDhE-mh/;2Mո(<<xCN8K&na6F!lb"U(Uuֶy1Wi 6z״2(WsS}U˕Q8gLzR!\ 2c:-Bиww+}ik0߾˻v:ji zU|D٪JʆAq^3}91޵*+D!fׯ^5[e.8;idyKvq殆8s0ϴ,|Rgے+%[ȏVh5FWK6tleS8g,&=nΌE)@#wCL`Fko+Oń iȳ y>kQuNۇ}i[1,W0Yd5ӱԦ@6 RY<qJd>3H5Y'&+fH̒v?_΢i;N(8kc|5 숂VE ZKYj$)"᎒,5W/L<Yp(8H C]JaS|#^3tjբ#Hm=ȋ͓д9:, )-?Rmeh7fh;&tޓ5oq2]Mإb7^_8?AD iOrd Ƚ#ȲCGo#tVq~1.tna.ᶪ6kvugppnJk0Z)5顯-V1QmY6 !2S)0.W{ kGFm^6띴A4 Huyw=yzQO)㠟=Ԍl=H2#UKI3:;:S{`N9fB&*5VJd\gl^RIs-$wΰRR[2%-:B2ή]]goOd ;x'ߛ@gݵe)qUX`RdǗ18n$MW8t \Lxs8(}GIkŴ =b>Hw튙Y 3%K;}X-9彂NCe,RϷg7wNjYMsK /ݫ˙{~Gw61#L‚GRY`JV|=dX@O.FD4R#] 0&z5vU -.BϩBdG[TJfI ѵ|,9^Qk*7PfXV268?_o Ցrъk~FP,]d%!͔. 
h#btabyRrb e%_,,*sy2P%[)Wd3FWl'D] @ICrztG*` kL*f6g,l!T4{@ i_8Qjs!R!cKz1SCp*M5jr5WbSowsK}IeD&[HZŤ,)&?"}' B:"VA DhWѤ.Fż~>}g JsP4a߹ZM tXOV,5qc'dsSgQ=@ 9QKT~ɿ=xh)d=-da7t [9O,pBGbL/vȽvkO8@iG+*O?~O_=3>^`Yh?V?fvY<y#]GZ$q303zƵcS5w:<)LY׮Vu0d}^GPSC.@z}V {F38 ӊ=fEd` jW0+ >[JKG9:AWZ6dC7ճڽh"uD8h-#E|0E-(MD@ #w4JGPX \f4!&r.:r=(f?HRi}4R~}4=5:oO&׭Y^;S:Xi8=]olLA)x/G."]EYB(WWg =+~^WgѢԮNQ7;4gڢF ^x@^8cT _ ],L1e/ eiƽk9#gF`co޾s&h$zF;O:|Fw2$?\UtL-Hܥ< h1n)qwUZⷉ o}0>^_*Dm*P6Y>]*PbC*zhbv=[4!8)6URS Z:mɪKq3"lS"6Is[^iYzK亽 QO*=zITӥmٰG̰=mfmbio)nxwsY/JK<,&9fw˙cl[,+IfT˶ܲ&nR}dbl/F"|z^ ĖW#=L? ܕ )ؓ'4Ƞ n]ɘgI-7W#٨H"lY& SjͲp25qg|$q39UR9mmM%+.B _.֦P{I&MoG+?p1j<`7zkx0ޕ$jȳ;x 8iԩ:0kJطD`ۤIJZ ZUKr@jϒMs,75}AA!Լ h PIzש3>!"턞 TRYYӹODP"v ~|hGx"Yg4OMEjFEˈ?/zd'3Xd^HL댃DmзI-OlѿhyFF+)#1卫/9)q&Mz,)NR;$Re_4,SzRҔ^< ~hN"ٶl8}|r5jbڈvMcʼD:b>vdiLœJ]Z_&$q6x+ep{+#0KBA@R`;3@r}gM>I\Z}"g֋/c}nP9 T~͉~TVLJ5h&?8btcQ7RN6 :m((ʔw;;uCXcKri f SfyHК4+>dٷ&BjקuiWѢl]"s%qyA0ZΐݝqS'{mReqh"GD3cDNpi-_$kDZ7֩['3Ү?wkpyS(xNlSX ]ݍ^6#1e06̴eUѭHYL|o=UTC ծ,O|괯TBׂ-hTn;|rJ%Pm@ڌc;PH6K8LENxDhb/*y)JXCNҚ=6rܦo3GpsۯpF͞P*8Txt{dW@>.жo 9Z#yiP!]j;ۛQ}׾RmWTv+511{MѸ5)q0I 7NLHt&& nFJlG]%ӏ]@L#e{5ԫhA'Ov5OU.yD]FQm=؝jXh`Eӯ^v\F~K$ 5B'X̅== C0Ǣ-u$[]oʾYr3XURQ #:mkak2sQմg474u:PIVϢV^v-T}k$R:؍8i,̔ uYU]ik9!1qW*x%S0],^wEB$L&$ڌ]~=qӛ&)9Mz|W`ϲV^& *Iv9Z~X|5YcVs#*֜(x9?\wRz֞;I* 1Bd&e>`98(f[=qc'8ȗK+#(ckD˧>6Υb6I?#V#*_45>P,nX]݇F߇{:%bf"N$^Q@eY *xdQPrCMVE kcvT>?)/S&wt6dGTw͔m1^;m< & ]TG}BPQ8A,e2~b(dz`yȻ%:!8H7Z XSKF~x2]CV3]N巯,2 5.@JCL&Y[n9$>T E`U|Ju9DĨtwzNiº*;vbl_O,Gz,2R܊zo2.Ӄ4R nl)%X[\F{ߊޟ|Nݽzh c_쉟y>7Dj_G{35εGmqt <)ƀiGes8M=XDCSg6b0Ѯ_8(͑p{{*4JEڶV@` ؠ*wyտ-e R=А=OQu}egZppnpH-%Iԩ9J[HPj`Dd^hwrFìdI\`Ie3(s}!2clǴ"\k ,1 8@-(pN}ZQ'L煖5$8YO(9WxWzף}[r)As^[dHdX:\ռ HwE)QP q/|4CA :V5ʔu Iڳ-r] L6E FH.o 4kcb=A! H'"-3^32fklt__;Bg _˺0."Ev5i*97f40Jæ^쒙49:$kre&M,eo J)3i~1 6v!ڮXW7ޟI9I8 ?@A=S .4YbO '8V44;!đX d=Gc՚"ӣ$!ƾ{zv58%O1y~N07M D𣗃ʙDTlxa#tƿ ^f˟Y3?◿=~察X7 `d8T~чP[W(? _g7OkqS*fPNߍ#n2gvT<3Jq-ՌÅ`2E,.2Y'tyB=;&Xcnŵ/'h8qҹS7˩AJaC'>Cv>ûѸD# ƴb+0dEa@NY]V8U( Vuu+(|:ܻFgAЬ{[@-tZw*5АM]BHr+w] j2흏FS@rtk\Su{!!@/p^8!d a/>M=H8NՌKSDJRMROG aUر| X0 }n|O_?xX&%Q0//뿃k)4\XAE!,6OBrO3 o&sJK2Ŭ+4r8_+V` ϰ ~-?t q^`>n6uV~6X 6_s0>_ yャ5TVT3KZ%  S~jRd5=N5y'~C&ڳLJO8և+^|Jٓ(^5{NE! &ɊO(M]4hJRPEYYA! 
)e,3GrCB`r vvha)QkOEego4-Ų!0&ύsƣcCФ:cdXjOiǤ+1k~?s$֗sE[Dp^r%L2Ow>ޟBcE, K)Gpre, )0f KBW_MHđpRk|r5jP8` '(#H T"_݄OYwTfQuGeU!i&4JxؖlU{*<iF>0@,O!yS ^3qY 셓q -3|NqVZ&&™ix)(;9F3߱OoO/W7\>Bam6U M.zuS׮`q O5mvK`"8li1}!U^_Oc/1r Eu r g\:^4#WZf~4.r2-B:EXn\N/jT/c8(*,ahӔrE9e墜rQb\Q0y6"3,U3_vXG\JFg*| 7|Q%{/aDq~ 0ʹdk!8s,":MN)Qa17O_aϤ97JB8N3z-ιdiW(TQ\hN`gl&Ƿ T 0]b'>xݦp~oiN Py p( Z 2@WJ(X R1.f Ay),,/0KKjgzOO-K觏O?l~񭟇7؛ɇ3D,?%386h^3.Am$+?%}l 69f0 MG/%%AWMJ2)b,-Wu*7C/<Az7`%pmW] N+K/&p5TU6 x%5.DCI#Б \:!$Rg 3>N?(B$lGD5fӯG撆d@1UխARm_[abZ q>e 焂* ˪%.LJq*qďwֈ`j30uϰaFN1fq$GB "‰^z{+۰KZU컓EZ˟N'%T ~Y"r1kEAV[{Fs }ϘXbѴ 'Cݚyî4KΛ];,2)\4N)վ%'avlįՋd!k9bpm3z}PpWm1{¯"0S [KR [b5{P G>/~1k9w73P cfwgx u4ZcI1PdWe g0GV~n877K<*%8=4v0񙿕Y+gg&/3/qc^rd%Gb`G}xs'W^[\}MV۵%N3O!oVnmhJUi`v' ĥSf颩Um)¡KqTGAp#8o0Յ;wG=]t{jQcb0 fC(4Q!kD6y4*ɹEc\1B -PȞwn|oTW'ZjNe"E&`k+!%P)Az\ e<% b>5),{B0~Q׵ʬZ8G;=aR2EC@4^!FPH82kZ{6lG~t!d|?α]WLW|N-8O滗V-̑LHjsilIzg~M:x9}m|`')ݟZe/7;izW8w -r17zڔS-RƈoΩ Ar ?跶ВŒyՋz ׬&xoK>CrYd QIJB6$ "d03&J?FS)1K] .:e*wN!QM|Һ}EO\A(oLVC'X Q ;MO#ܱ<4G="*NXKrZ6ZW;yHɪ.;NY%8ϫvsG!4'uEsf8^T@mRN^bϱ( n2Q3V]ӕeDI 2P/jQ!215*vtkPC S%,60H`K6uKo#` 6A9 s Osűi!Ml/"".$ c9F# J/[M*<́= ]l= ]./4,ҒՆt.NuIm#p66V(0TY=Fl5"ƥʶih}2kH[ oY]uXD=v1-T08ָE^k#Sa*X ːu`a$Zl^ NlTx]26gէMfpVۼ9-L85D+W.Wy& ʍ-h2+^eP)TÈy2y2ۊ=zW++-"m&8皸ƹjR[~Q%)pQ%e6-nID+搤 LBZV_6һA$ sjgƉ=힣Gۡ~_H~z5'tqPFn|m&y NHRVfyL ɠc_2VqČv崦Yo\1TzaLMGD`hlM,]l"߰{PS!kSvg0whwp]Kem!V/ǎ:5GK,LtU'a؊0b$g|LL:lvl(};:V INj$Cʨ"mד4vYڸ0@CJ gy7ա +]Uz^+7겯ʗO1֩^A#Skń%EdZgjM  KB9hN]J glIcpNOj8jrju 5xR}HSẻeV٘#7orBV)I[Y6y/* 3OިMуJ,b ǷAv i$ӎ&-2$"74=S6 >mr.ò ehf<̒a!Dt6joa[JLT>xĎ(}. P`ks;[)-[91Lxl1wbV;ܖ~l_AU8n7DzCiq!RD"ZwChsa2 O?)_q[BP$I*k긺V.Qi'>]z|/F+>ʙ2eIu뢓aő 2,tC#|Ec`\#R͝7n3Ua[ߛSohL8'V "wPX'3wYp4^X4x{ I;ǡ3e[aE^;QY[Q~W\ټYQ~W\*Atƨ|Q74 d@cBցLh *|(-rE?RoD5s/X{vn GUȍ*Mvp<)9Ar\&uZ՞FuKl P*hnz P3>q}8D3r/ dtm? /ߗ"X \#kːdd/fN!CǷ2 m >(C3εb[\5Dų㹔X0Hu`J,s[q[oIk@z- Fz10F+'fWbk/o&D_m 'ϲkXK `f!CQ,&RH;̥z(MhhZ Z2GO. ü.Kr.FR  Rt-n<6J?ݝElP貖wf9MCʾrܝ6'r` MV:w'9wJ"3o8}$! X'^j##^аW: 6; ŀ߁:-s8;d]9b_X~jy?5Ž?a{1V(kb9ʾxx۴'RͶ;wT+l',x-<&e m/wDiݞzdspLЊ>%ph[ZWқb9$"Lq? ?-E|730ZT a]䲐^YՊ9BpGi%,%>dC1!ZE{T̏ٝ\1$0jOץ:& sy^keOY/N3ɺ~ク?y$Qϰ/#j,b03# ]PIyxAk"5|)0m0aJ߿.$$6J{x8",ՐcL}.`La888i(QlTuڏt"дJs""=B&=I zx`uf8|N_ۙvA6 m&N{FLޭ ɻ\[u%9wj *]p&T9z 6|0@Nfxkw{Emv}x`llT_VY>w91Lh\_X g16M'3 x4t0,{Ӵ? vC@;-3TWiEq d@r㜑GS"~6%@]Yڱz+|m*I-ogwޭΦA21qjP-փc 5v ;a_I7O4:i`Es:"ſg71 B708%"PbW"*1.~XrR7pHGv/-s@ײT^eUeӾ|>*ë?`gmrg7,M^h>]>^ ˷x $~=| oiͫWn͇={{vϰFez=ϟ_<{?ޯ]%yrx{6m|{5L{5H,vI?>wvdTjWY^Z!f4z5g2l^4/ر|ѳ?텗g~:jx ijY%T8U4 OZqaU\/@+JL?]D& wL|7wz `Sfć?}fQfJٻčW(scsj?l&]kIur43 Y p6V# la,FF=Otzz= ]@=JkN^.|~3[aҼ(~ڂF}~WŠ(fy u_'_3.F'_ .} #םN)gt]$N/}Ɂo_n{⵿/Go:C}V(|u_R*e@-,`ZƑ0\n(Tgd?ú_aq[*tŒKa2[hwNDwkcJ81L!+lR|uDN0 `<_Mmff2n CT`7Al(>A0:G)s&>JאGeLOQ1 UYYWtj2K™Ae fD~HDT6{$6ozxf___omGqN0XVKWT8U\$($H&@.RTR4fOߓ4AÚmv~u2Z÷a+ŷg6+"+#ñ$BsItpԚrihs+Qg %"31FDS[ݟmֽB@ \|t3K8d>'IbpㇰG]p}>}ztR zbNC4v>`QT\APX8#h@MAZ/+~ke%-W1IuqD;*JK3ſt*J G92@8Ml.,7$o(wik&v_!o)yT8ʅﻷ?ҤJxqU*wvTkUc=lہ`or1{XѽOi[YEGNǠVj9kZJ;Wd/Uէh{ mV!oX`˝rrN2O;nYKpQJD啈i-kC:x? 
I{$+Dm̠ؼrfp]O}}@8A˾K2Ҋy>l|햟<}pqVȮ.q#mۦ/GpHbCRNmce9bĹQ]ʵbsmsKGτXt"z8^=Y5>Bi!SvZC^iF-y޺Q9g^7Sۏ!a>ˆ]X9z8zsTˎemt{g Y Аrm4G9z̏~;PPHOdwvrb+[h}7-;E bfIMڄ]ʺŭd< *(qkefsV{sRGHF-PsMw_hضZ=~Bƺ,|Zr)o}&c,f-eWֹVz.)7e;Ϭ;`L=aj3 ӈɽ4cMZn[]Rh֏\m%hXm >ANAFBZRp wR+&mOŤtZ뫌.6V_l &:x"0xqĔ0Ϫ$9H7Iĝ֊$vDHjb(\|#XjOvK*)ٛɶ GZ7htOM_nabڣ[kp3 kxsλ2{LQb5Ipxgc1ӄZ+X8GGRZDH E}P U6L4g]m7$!86&.1N!0RѤ1 M:ςṀc{n* MoHُgWgG~;qUd%"~:d|\hU+kbx_TE'ykJ›Uڵ;[0zˁ8)~67?_g|*٠P*ߏZ.m,w/si9pwWWo]VeOo]i`g/N;?_eQ%~|힝~Kj ܰ|m^oAnl/ ֲ~d;>pPzge[{5qdE$ӑ J[B]yk[`|¤Lrף,au!:i4FNb Nd< u-Y < L]4!pDqd-ֳㄤ[ŢϏomүąϏzv$VĒ%x RmSY=1Z^o+7/g3xŦ_mCòˊ̀paJ57$( uD DZBS/3%b+mĠ q")'1 )S*R`'۵ffd7wQs4u:`FgN}* 횄p6Xa:g(" Kc4*`M""YP2{ 3-\c)#`XbdSƜ˘aB9IbIc E!$Ti5(F>J8 N02tA!N%K ־ |9lFI8%rC`{`FĖ9&OT1]WݥosF=}F7meEiM S(ô-xV"ZZ^mqkW!Fōޝq,zД9~NJldTxiN]w=6C m+^d7oۮc9?^Wg.I'ݏe[ /&.]gal^xhX/,t1hYHt&aE1/TyBu?izV=r6\dF|= 7f%`' \L߶akyY,ed4aM(UBJtb }a,aRAXcz9:P +U9 C,;)? nX>~S#)s&bƙ "gvcTe&FgyTr?vo1R-4S剺 jAr~dH{0k.ZOh Mƚ`C]`p3 ,rK8^ >3[jQB"&>ْ 9µQB9n [ׁ)j5g"AaB$6Gn ,$˲`)daDوl}E~e>ׂrYF#cT)ҕlD&:eХo FNy[-K$2SX -\Z1oq@$xBz %B.+Eۡ'e!EɔR>sCd"8;a8 jX˨Q+X,@`윞+ yJ Udnj &8{u h5$NW1] SPkQzEK.n2IMLX{/B3̰xnFh> )\ZE|@ OY-KS)?G|fXj5g >MoY|*Fsn2X[V.-*epjXRc1O\\8 Og +~n!?6Fth`N{ZYRu$ߗ]6Gsh*Aшy#H>ȧjHFԜEnW#ˡNY,D4e~5giht洟~]琝2Wv%\*fzm(!4Vkʎ<^٥82cQ0 ,WuW)yQ Bp|cHRrіjV xAV<ʹk+T[bpuh5F6K*b5G %SSJ0e\ݨ D}!=Ǔ,@7K*r}؛v":YA"2CK5]?Lh⬕LVy rnU8i|lM&nm\N|lvKR&rEt{c-; SRSCk9icx TUs]LH5Wv'ʴ@@κ1U]`˽ ڔs5*pQcgMe^ . QD< =!GSĹ1PyYXYrήfuf@` 5DT>P+!B좈J%)A XǖSbx7`y[ZH&(cHfT21V1RƢ:mD;+Z՞43j'dZwC)b RI9l˴'NoXkK)Fl)Sc:@nPRàfqBjq <{ʤ8t` 8P b⨥QJJ%01Fc8X[׫blXG3ֵb`&wp#O.AxrA б _e[c,1TD$Rtʄ'R" -ʷr|׉.{ndyň!c՞Køyk yxIf}0:"͠(-j-a[E^aJEYB /a}._Lv: Gnc{]ۍ/n7{۸VfUl:f y;\Z/W^s:i.ZLKzmI\p4wn{ʕ]Թ@XbR[u97\ݝfMnD癅UU~Bw2PkD[=AR/K LȺk4:^It T:X$9\LLx*zL 7 _ˬ4wY. X-`R vZE\lY~4sI3,vCI5Krs뙻v*ɑ1q;*8F$HB" D C+3#y 4r1B C0uJ<.)!ۗo)eM#Nӡ=w$[FHf @H"$7s. ~1Z ]\ U۾΋UZ QF+aڔlR5L%n4rcWQ.T}d8a/dO2&Y搓09uBrg!pbs*0+|1P@GѭW?- =4%#O>HT=O{MF8~.Bye[ Gqd2Ï=I2 +v^'myj!Dg'< Hö+l)ˋ&.b}KE+x2P-cYid# u +RӬ5u!P^KzvCȮ# 3oސhޡ"c ie–mO?b tܴwf$WVR|g&"vX>NsSZ VV=(ATNS[P\ N,j44 T >{a+}N> G<?8q@ld`OS3JYTGsZexp%G:1N. _h N@]"7f3*•AF-|Ȇ6e[!FȰ8!˩ @#ө\ ^*dFleb`,LJZ$=Z+}yB!4F!ta&cUzXSNJXuSXJH(7-+'2"QDߞsYΚԌeP-@UdM!P$ȖaS.aPM\g1 EmMQQfӶy}ކIql.ԱL,F}}Zq,{0?R~xEύuE͍G5>.6L64[~;MÖ)ϸv;Oe[_n)n4+޼շRПpB]dEk*FF0EW$Se)ꤞ;r%JUT}5VozXax1SBo tLzT+6k;3p30G_֜pYt0{4lzQ7QoTpavqnhj~k|7VuߩΨ`~%Ndt1ѧ&M]_}ݏesnZ0֝dKdyub-oLߍݠta맄by އo@|'\}L"Ăvh!#Fy /~QWs@~L9kIm$]z=}uƭmOfa;tҟ5j]͛/ƿ6<0H՛?vI< ~  ƥoxb^ou񂫱[5z5L2 ߴj?q?|%QVP~0J޺(crf߿Jy?)O1'ՍimV ]YdXzGr3IJ8 )7)}?e/gLEf8O64)}Y4խPƓ5T?fcհ,R%?6?=ob~:*Մe^ A tk1uJUf"y08[ݷ7s/^S&q``Ll-?A.0ȃ5ah:@~mE5w撧Qġ-BP&0,JP5jPb i e,l[KhM6c-Ac-:@# fsD.(%B XٰC)P5:_rN\xNR|eE]+'#UmDc$He/f>R֝G*<#`W|!䘔X}~"̩ZǬcSAuDPX#fKD[ XMupӫ%<& Ews9p^l'<[gh%*zh_`'²Č|n:<#_*|aꧨ5~}!H:A=0Z\[WD[O_I-{FH~NJ& E",Ε-_u H lJӔN "55XRrn j,"\6'Tu+WZSZi\ +7JVknP $a"*^)D|YTn}V2jP-+zXG_*TZQY #<̱R&BwkGȠf`/ .\=xEOEI eUwW2$XD\LW2A"d\U%G@w:h6E <,P1EΥ(D`ziٹ5 @%e-2IJ+Y݄mYJpYqIe3%0@"?Y&Z,}f('nuqnXn bWZil1sjL]`O#5j_XNEYu#Ha8DqN (F(GÐDRkZ3oJNd鞞ڳ AX{ZA%cwG_Biml@߈-YN# -!ܵQG$CBEwBz}w#zqD̽6sW/ztwZkV2 Y3I{٫;zfdߙ{#(rdgPcCsf&^YiB9[BV4@`g ? 
)J2ֿM(:O%͙qex*ɡ)^ӡ/q&r+ I^Nl9/EPT2'c-0:^T !8N\;jv΃&Pk)L)GǾu2PHbV%=]#蹃T&\ގ,t&JdyEEł\<;7cy6Ou,/C ;͂(90xXH7(rq2NAVL"6xg <<⬾lж=8 z'V;AN =C} ET2dW~' <y `g0Þ뇚g wI PPzD EbF][oG+^ gw~J,` O oc1(lg栗o̐Cⰻkt{9ΧQw5fӷkrW҃ v);$L2hI:0%7AnF6 5 5{ZxWvd?W0iF!o>S{3E2NIKyZ<:>_<[|# {ZWi5Zď!=L&Ïy^B4ߍ"߀0hNLǻ}v Ι1x }1DB'9700ӭ˽ؗk k#nS_bg^F LD9D!PC^cjLjK](њB[ F],lRe&ݍ;DpF\Һ7"Y~n}Ʀ֏Gld>j\(@ ٸ8'ث'2Aپ(v@zeCjg!IʾeaP`};~> DtvPH FMɹ ߤoƺ"9P]ys 7iTM3"51msw}- 0ԜbEQ5'S-+ b98JiݿLkʡMBUlP2T:>  t@Jjy  Zn< ӊ N(pk*5@(*Gd i'#+(߲L(,?~~z9ۏɯ[LW Bj&?~x2]/77MOi1; Q:T΍L^P@~n=F|plQDC[~{pj \*Y@57}vZ\oSa Uv#.1BQEP&X %PReUtA3֣@$Lٖk[ mʈej9cs6҂ M,FEi`ePrd0b#ڭZ( /0`>BY3S -ԨU@ic7 by^1,et촱6+ }ænR9h(ɍ[_]BZju89Go(\\L0с]ji䔘hWqH?BEލ⫣ѫIe$z[EO;;rL=NVKc)|+IY|uK`H;4 !"K[ʵzn%@)/]\!e=i"@Q{d{Pf?w07_D1_}pȮ8!\Lt & S-|j}Nx5u3u/ Dc?a6 aeY  u;VhT%KqOj) ڮo\4EGZbZ n) Ʌ,^}":+ uӷsU7Q7G^eZۯ_׍7|%U('7 Gsؖ>8 mMׅN,g!tpd>߾KWK۶fR*%a;oQBހs ȹ2kcĨ}˰'sU-{Nj~V14dTKGw[JQywH>C"rr˺kRR 9, ̑Eqg8Lo-f֒cCSɜ|Fs9B6&9$ s \a?|rj"H_ݜjC8˱Su\asMp ɕs+]tW/Uji*z_HdK7)߃-3@]}~ۂߐu#urr3qEDn? Jgҙt&z&xт U! tEeydҁ.%\$Hc~{z3(ސ ލ8((0+sIKəz̵kLmԔY#YPVorDCeqMJnD0T CF24墄vxxJN, F34撍'lg)JBY+h K/qyIVو5g)"PKE#,8b4 s $^a-q ni*I sOfu^'GU/ {תb5ѥ74ʉx}`]0qKd(ϵYB*J;s dgTCĵ>Rh#{N48^w3 e՘☎V-Z^,5auKKLI1Y\PXD $\Ck,t{c4 psN[zyHٱmȀ0f@,"F'>mo!9'{ߑ!UdBM`">֥Lh27,BU.vNd!zsj)F}à(7=M rdKC2pp+ u?? f4c\Іj ˔sdB5ٹ>;Sl=z|Ooj*q!b- ,hj!M5/HB(q\d8TgV4HɨLmլir UH*>FRn:{0/}7yڹ;Y횼(r#8foۗG&~G˻ >jŭg 4[~6<{KV9Ҹ˙V\n"L枩h?}m.t8w>98+3Os =FUrq8E37][oG+^ vG{wГ 8ɾB<+Q ).o5IICrH9eF,Ù﫪~~xe-ԶkSPՒ޾φUP,*,`[bTx}K$JKLw^۵Vy3 9q/{4dNHE9r!Ζ64 Fa88Zт)iraCjdTS ɔ6@:NY!+4oY꼜QkZm'xMT yWw){F٬O+.hwV28tXfqs*]cf3Ҵk zszz&f/0C\ 3*+{՝X9v\\w)'XR\Vٵ{2!:fs#铝UaÐ%g_l-K7r[s 5valku|{FI?hUpnNV_b5Yh.:]n.8kهnYnf[EP~N=LQ)3k$J./AU$^hxvs߬ 5}CwtMB?ś^#SfGJ-uԟ?^ěYk}mݧ_>LW8?R5!;3uH))=x M 2vG.E 7/L߻7vIGd9<;[(CuK. υkjD[:ߥetb|&6D fܱ x$&d5A,_ӻI ȪcM[)_6trohB p bAl2PoU~}eC`)Ï[hMn ΤQ.xC/Ǿ1Umf&USKBA 85&f,p+yƳ:Uy@5( DHٷI%65= (}js qؘOL>/)HBYw8w%a嘡s*y5T-rd.gwf$UXNS 2S(+o.m a#872Br80Lb b6c Dh"{X5 jڇohhXY5P.>^1D C# Nָt4NLs̽ys[10rIR=Mu} Qu&8BgF{fsS~xҧMfUyTfSFLDAIp8~Dm <獽͏֥ %'aNzdpc'6dDޱ|OO 0<)xK.~p/|lzl'(kl|kEƣơqdB#BV(T ê0$L46!r9 T քHeȭf!DN_0LMs3>9L HTv5~%0`h Dg2 4 pF'A2QG=W $hc2W Zz_oL ro/N%@WCEu2π 1-Uk-i[I J+͸K4 sThiYcDPzF p D{ˌב9LB.@ 8YƠQ# p"(q9<]#ئlqa6"S4>j*@<}xYp8I<Л3a =+1?t /?;~ç ?πҟw/pwnP0IFx˷7bs:)N૽_>V3%7՜T:,-v{,S |9^}InTVRȭ\s9:DC(*0I¬pJ1&AxGtP#88Ӱ=[vS1!|œT*^]TMMF]ph VuU벟ooKbƎPif*/vx=R>y%iMzh|r=Cs4d琉2e)Le1Sm9106sgSI^0/4 { =o}>OKva7Z2C4<"b9b-C92$2g!xHs <ވ\()c6!ׂ8*.&q#ںh g4w+"uB:E9sL,c!X[u`ZcZYXBdXSVaS I緕G T/PjA+4 J0eneY䦒 CPiEU![B #p$M0aPk@%mgw%[_DJSUrD'Bs`*FtL=9?Ӑ3TvFI(q繯ҋ#atq&, INw9˵+uq:uj/NFgv~L9(䫽xZͺ^aAk.h=vHǙ˸+G'm-g[eAsZWe1mi >Wr)HG~S1mmz+ 2fUFiYOt&S|9Z?h1iJKQ-[ -xdkls0FGoh`2#|\-E { ^&|>1l/H(Gm޻ uUI婴bQy":61X+hP&7<aGKPƊtDQE siڈ<3V?Zhc!i/T֞S˅ؤ r:@5srGPѝhM͝\s+y9ʾTz3z -B)`@^922UuG}nEa{JC0\@ho-D}Ϟ.]ix{ G:.o ׏R[.7 2[Cfr6_`4e v'^'檬ب,\[*rc@His*#Dr9Û#%#~u(޿R^#>㯇Rk)*zS*QS!5CW)-@s@&ϕaٲRsJQ+.gCd*Jf:Իnp;290ڒ#uqю\ݛ$*oRRt, HN8hHA2(>ь|5@3i>U0(aײ(C2FsqT9b>ad&ZP&@ڒySZ26}hQ3|[%\n7T՚C/thG`kcb`o:;Zc5|-#RI OWФV:sɕ&:n*<^OVXFe,mxUU ^֘2^1hL,8Tb޵m,"SC@.$N[Me$TTQJwԃz")K(r9ٙ&VS/Y))Q% fHyq~%A9}f(}Gx[0y:PU}|Pd.:ے8YDž<޸()&2q\W] -WLl&Hj$֞!` %fB+"t^1MOykqwQ8PG1\Ɏ.ڣG\%&jbEZb|ն~w"UQ#f #v"Aөh{6`ANj'lj޸!Tl[uԷxVHku 8דQQR"B3Us`!؅[54zPj"v'+>, [Xr#p8ȱ:Qg=;8P(*:ZwXib(0)+TrxTK^G7CJ)Wl,Q'&Tq+5b/5:{Qyչ]9LiBJoۛj0ʘmO|.l]7YnmA=xVETJK M* Ɵ9]eͤvwF01Nݔ7{:Ν|K7hoQÏ]yԯaDF8nR#{f*CS+`Hiw2gJqCCaΊN3*FePg񧜭g2#+X3cwLG$P DuW+1opMF?$b Z):3-A)CՍ`t;$ KNY{(B)tL|O H? 
""F̮I}briBr*b\/79^%|f2k߽SNs)/j|(xߙB-}q"MhZw uzvCW&>"Z܌Ģ~]DI/#5{vpu)''.f=Ui}z.%bEd]?񄵄KQ1mFrwỂe1rZwds\}T] wʯjqL,u?Vr8" #%t h Lo$9I%ManS7[n!%8bp&''irIއkmF"I2Gl3ξzG6N6D}rI''Q5inS"UaKP(4C4"WZXc4[EJtH&4mu }k5~R \,SEmR 82q6yw6Fm\-~b1gJV\/m%򴰡^+NWIcGI]VPV2YӜhQLHI|Rh&[WOʐ BB}# qiY̷ZB@K^&geƽ4}$%2Sh4w)& "W 7f [;mL;~12I1W`]2 2kTLXp%T˕*02?+b"]oD W{yXJhRsc) 17 %wDH <9ƒ & [Fp(a70;f\eWv])ƎLJaǩdoR`D&[8Ή5P-N9-4Y 3υUaBA `,gW @kD[J2\q&}1 ```5L F=;Jx毱G.bl Q=2Ccwh ݛ zV#T 8Ÿ~̍\|ԕ).h']J;ԛ4lBPk.vW,  |_q;>FFY?-rkc C"Mh" Ԅ:8 Q)QSlZŃ I6R}&<^_QA$ej}ɻ KAov_k JeY[n39K k`8-Wü))UZk4 6Q\eaQ9E5@xÊ^Uv;;Z6PIֺVB[\S>E5ƈV|NK)yeZ-:$B0E 1 >FHP&r|dqهp+w)X8VAK!t7:##)1R&q}^cAL)ͫy)x=:'ɘ siEl)Ujzjv{ހw|]@U]%;[3PX$6ERUKI&H-;1ge@DT6d S_2Pd Hab"k[.&Ǖ/48θu ڿK%{je^Q@RNE<(DJi%h..YHL^e|3Fa *lt S*?z$]P@`Hvnӆi 0ٴǥ%ڼc -̄$,4#ݬbfIV]Xv9ڮ0.hiΚL5MV"?G]"rѦŤB(6UFzW] B ]BU~+(+TDF>  H<$"- j,1b0"!r+M]q**v ~,CԢr#{ |y {`^@ $jcޔ]WdL Bȴ Q!iXk *THā7!Qh2kG6̽t|X#Qb1 0q)ĔiCP*I"塙L6R5'ut ע 4WV'Qmi˩@rU. ;<V*"'*%{7 K݀XE("a)9ͧ搄R`yiiL P< CVh_ i%sgq/Q&&D随~oB*R[) ћ*"Tb 4 6H%Ռ閚AH`!Z]&dQt0RS{%B d-A ɘyN?+O KqfT̜L'їŔYN7Z8P%隋/vO!2ZiB)uŖm9N?cDѱ~F2046r pEY.lnGq\ Wl@*~ |lUg%:`O-Ү8ɦ&YF]gWVJrPiv:P%’e2<-_/NhDuI}O헻0hI>,g3O`gÁIb9ƳƋi~ryi2ዴg針pcPHjc}5Cě}2I"כ>[ #D};:LM03?!1ȾL0nU;3lq>UܵvssaMg .{_*a'ċ^ӧ|_;A[-XVmX K6ys e;͙l+xZ&7h+vWxP˶mʿ7l:˄7^yrB^h#m}ۛ_|}wٹ27O J8&}& &oFX#,~&]ycSҽ3-i^^%"ӌjT< G40vsC`QiphupbHd٭X菍WUbpJw֮l}vܢ[ 5km/]9|? nͶ˜!9>~Y=F 95 6F9<$ː%)f #ةR-v &kjMԂBҬ(4J@)<1:Bd P ][M`J7%PcRՍÏj]*Dysk;K$je[C5sW H6Z#fՌ[N JX$eZqkN5d"q/!>(\=rQ r"xkOg bb^2I`!b(۱z^,g-A~tÊkHWW\ ҂ ڶt~*h>1zZ'+vttjԘ:X1Wјӝa2bEcᨠ0MDP֣3 ǸX T7iD~Aܓ)" Kv#hGCגZVjl7~M$ T(^XB RD/֚e1E3U=oV$"j*= 4v/5C0kFu RrL H,?2e!3b28@`D0 =y)\bxwdyUAR-vMy究¾ A۸?cjW섳mqYRR001 nM´@R1LG;SˋϪx7 *}-ˉbA3:*4Ƞ]b L!nE%Kˆ @Falb:Ԃ3l JCYm' ADIa,lXj1N@SR"(AT*[ʇ]*Obb'tv'gGക׭E4sfjg??fhE<%ˏIj5 k^xʾZ2yJ-!gEi4;WKYFɹWOsᾫۇ޽uņ0toۗϯJek=}{N>>+=ZZ>,C# ” L%v)QUkp@xzͤUkbBpyML  ^g'p^6uC ũ<1%0dl 3J9\ h5VȌD j&Pg MAB`ESƎ1ւ{pE=)L8=O5@WB,k{f><>\ R6ANRG2'ʌ01}p(DXjJ,~J;eP%kwR.[+&P&1& X r}2)!&K0"S 9C(U1oY dI \V EO* dD` ֘"a*dY&a5k)#Dr2Ȣ! d ZG?5'RAXd1YZh}M{4R4|WjC X'_&S5y7[d#5V*e3rMT~[BW!KT(쨿{0Evt Y bn/sSr &I[ ͭo[NwVMRTC1T2,z<ڙ3?"H>s!y& |+p*rZc&~q٥|NN,ua uWʺ%@3rv,S*x-#]"*Ї}Lk'34MW,ܿre2VU<$Ep,Jnj`"H~PR"DvCT3H7EX`uxA/dp q&~ y DCA\ 8~huR`їu7{d?^Lcpqh %bT Io.u?&矺`&zʣ=o❤ b3鲨7 @~[c.(Y?EåML4z*`I+Q z;{NY8Nbi2+O|TSwNߕU v :@L5ȌLL԰4M*K H~tyc}>ߌn]vr@7U7NNFɿZu-\~4 -Xol-H”Y]^ W1d c4CDIf-̫wroxJagoww~VE,zx- ơwe9x8 i M'،cY58D%|HjXC&h n1P"TwTB)ݪu\riC&vl(Pq7*Ξ!:h.+pK&RrsVoVO6Adk $$snv8V]79jDgs5(ʾ )il$j3W-;/="hAid"V&3"H4rgZK)P .4NB+X 6uﴃňv~q,hb#[.hrCqBՙ [DCpqAqtv#Ra9e)\|]7ͦ [Л K.ҎCKWbgr`BLtfѳVMT&LKgl P8/3,h2z*=^Pr=੅4|@3O[`g +Uy/+4w!clMz# %!mjV''?˱ zC֛Wyl3V^ ;XpZbb82! 
t]jրBBjNpG<ڹ8@Cf-J8(֌YQ=p憹wUt \B-8ş"tM{Yt2MBZ OQ1 ޮ<=?^|2] VHO.%uoEHhLy2,ʖ75i7[n*X>1t:[F žteĐ/\DۘlCnwi_r/4JA4RH⣴7&ڷ<[B @>(  s3&q HJtݵS yT,'8fR~wc{b3b@p8f<|1]@੐'$r3,8'!8Ug3*5]tsYE9y`{{[[}?q8L ko~nb!7p As6'R-߁9(®wS*x!yz֘Q<6H1ɠK Ҥ^:v*+wTW5+oLg(кUOZ^>.-5sIțg g3I| /\gdpL;&~YyQ1̟ݗ=k(BI۾jh/j-Z=ŗ!ԏj_ڱ`_m $ԅ-j^ nD1k%:-uc =vB[]BO.f(4WBd҈%]/RZRcʀpu PChN]vX LjB-Y՞G%US o'/\"">ݟ<\nOANt>o^jbJyt|\ғ+j^&C^]?k"0=ۊcx "}9E""~9erC)tĕe"Iq$hfmzTH(R{pe-$ω|; 1D5yW蚨^I| .$VC6D4Vm8I3e\o,%[qLϐ&GlZcn͍9mFַ"/|?0yo̾Ϳ80v]J: %7p,dw!=/[.gB'{5%]gtzym|O7VO͵o?9z0 {s/='ˑT/_z"- ^3 #W:)xKZeNb0M )ƚ|ہ`4&4b]qq$&2T'[ &%bm8EkB`oRJ0fRlؼZr}i";&Lgٟ.,lH:2's0O(!j8] ?xdW>}|_j0ruyBLg.O@ aI13%gq0ey2]AOGH( W(iH\XɽK @R# TKlhR,S0͙aZ~7 vWr;m6ZЩ2 y /QVZb^WCU@ TL>$BRZ5 \Jm@^j ';FQ02h!GB  x5  :ՂL% #Ϭp4մ֘:hq?UM5YG fXy5ʁ,u^N% .pE) XhU 1fRmg/6ut.YkR* FBIz@ jO4PMk&LV [:눕.Vz ̤H]W͝v2Mє:[mMGZ2=(.NSHz v@&h&X L0"P9:hJ`rt+ &%ZHaA`Nj (|fB\⧒i/8B)U*ΌJbjxԩ4Ì7Dcz/a&%Ƿ&- <dFBiv@^"&fgB 8S[C L F,o3cdq7VͩU,Q^IA/9P%B !%YWps(`]S2&ᶸ\Da3U0q_3x|ȲŰ27z.ߺ%#>G]>]0n@< ۋ!AD$'3A:+p7&.)NglVɈߙn2#E Q;R,B_]/} nJhVC`8ϹoA`W[Ux/x9äB@Z` 7bki}6j4O2R4rHbRG_7{٪D2:I*S5WB#$2M?C *DAȃR(2,7[2婈>\pE5oU0D˹HC{M)P1(_ r,N^{*}TP2F@d*f1.I!t`@:>& D@y-"xzhqԚ]sSP QfxsK_F|7% 1R1opidٍӓ*7%7SmEu@1[h `pr6"X>b9w q~ @K1x "/p$cM9a DP|9mUd֌[L š̈WST6#M9 ; -[h{a@g|E"&%(pcAh<[tS !b #DHG{X @tM)EZ҂46sL3#TJ%KY%I\(BIbcoM9A:*ЦIWM/Vpd$ ̍ /djYTV B9a 929xrP4 h^zaz>$}D= ҭ7#ѯm t%(·GD!~CBJ9G!` ]NLXw0[>mpK~d ϒ%.ld >>&my᷃嗿_l=<*sH4Bp+ Wwb2c*s4WЃE2Yv&Xs+]Yfv` 4a+cj:_O23n=[aƥ P𮞂:ɬB$};dyVwN-&[}u^M "y*&R5IiVQд6 M G>u&4Fz\T,z__ͶE5t.1+& L"vd;TeOrw$ig%W7z j~5ȣ;xl jsxi;?՚*OCm9(ΓӻS0֬x]{EנZ#lwt H1iY0zŒdO-4{̊?XInͲ6 e~UG&7  ѹÔ`Hvf4;t6LmLz$ϳtr/辳K]j9VPh~CTwhDWh[GBȮ9ꮱk*A>>^͗^#Z1f?Lg1b⻨7>M~0.bNR4fo7OυpTSQ g?%ě),h葸aʮ|ς;kPYH1rjSfqLRzm'P,\qiHB^֑|{Fڍi%bPFt>v;`v@|[ڭ~bvkBB^֑)ϭ䚞7G1|&lBs+džwP/ODAv ^ :CB (w!_#68CeZz9A_?:rV9]Ul;xZUyp-J\iq`jyV*b)q0rv0^g.ĆޚG|xTcճyƬӂϐ'q$rrg1wq6,UÕ?bA4do$lexkb|ROҺK*.KkuŦ %Ro/8aSW4:,+a10bJL1 ]fR}Л\CIr\]\#_{.Et׊b-3.^x POs2Xj3L@= $ C6: T)I-V/ 5,h6pC:1.-w΂bIȰbC( &G!;1g6$i*)wN;q&tqk"FnmǪ< |\0v3RO|MF{< eb%bc%Z)πUQܼ'\ΙB<QRZę&gg^,O2c3N(ejRY#@Sﯱ`Kôlu,?&׋ŭb*HId f>ŠA^HHs) +x;Le%d8d7-KrauL<#%Ĉ 0LՔz@#F6*GjVb\z&-X#3Y$R581|T f0 (hckG!V kik1bݙ? #r>X1$ A`pg68z](HYYƾQ*\1_ZlmG]oIּR8;[K3;Y^Gd*.& 4PԷ:0y0ꛩfN ꛐf ;,zr DfmQKmJrDP2xA@U[kʐ;rZRkXS˸%9em/wJؒW+D-F!j1\O?._ R -3̳/+z V]hjUF-uT%^?5x{ jK;{[>jR;4[YV|u ,&8*ry~p4+c9$bg]vW`!AʮLFNO7f '8L i_2AݨL!$YfD\qtb_#vm8F*&;T3˭inɂAb9Q6HS-.U.6k+ϬRA,uTJ VA5NNDIG;I6*rRDrO8s˵U.s/Lȥ, 1~᧵x*M%[YOcb,ڦr}'xK]^'a/?20oyF!'To m!i^1@%_͸I ng)=*()E}Ė ޓr裏 DZt<Ԋ?oe }ۚa|b_,W/x[~e޸Vn1zsn=>RKo.dt5#*ƻlDw,k@B:C4qyqe\ k)8/^M N6A ,a ])=vH: }>5D01iF;VRB@X+P#ܚXL8ʟ˟*pK#!*;I&ٻ6dMF0<,]Q}̈́"e_ I /3CMck_UWWWׁ XyʹHùr?n-Rl6\ (/W Vc C2e(VAZMTJBYЊ]l(иcb!G0zrK.Rj.NP`x*;Jd>*,Q j r $l$ _DZ\9L5U^oq8FR缢LY Z*G45+T9õ "&ə sBbS,ek ^š%(~mCARu ,186W/朴F $erٜé:nk ဖ}*F&.Bb*ݾP T/K:zؼ|]yA( yr:^>߇x'{;f1l.d+]'S@/jX%h/tL꣛/n1gx<[:ν0wKGQWO 1a¢ݿ|T+qHw9M m߽}(awI< WhE Xn[jz߬|Tƿ R>[z!a0o~yd8|   Nςu+pD0-LBN&woxs!la ul_qЪ|8[/tTkҨW8=4-*A Odj$_D{E>F'|4 Ln<3#pkU=cϦą4 7ے}ԵKW)U{ՃS_gN8į7!p)x6O> Cg_)y&j]W<'W 7ЭڟݸNǣi*Y_n8ao'`B :Mdj5fLIfn7 ؘ7f&ED 7FO,`ń8'kP'K Ōߟ''DC,8+%8&劈V ~`Mt`lƣZ40FxG/c~w:$+X Uܶp!kɛG_,J =&8,eg#~)F`9Zc<шcjauD=dptt<ӷzRJ,'T*XCTjk]}*/y{ҪϬwO$rP0ISIlğMɉ'$ |A/ۣk6]#atr샩PV̿ʦb\g)ݰCq_=YS@_u*? 
Յvʛȁ `N_k<3spFᰦfUhtzY#7 #d7%֡{7ݙp]J kQ(lx njWN!vНՍmwL:0r5ék-A&"^#Sv rJ3DQˀJ%X oLC!%f/$A`B%~ mI|WJk#/yvQK=pD!`kg5) bkf :e}sD.FQouV DGL8)PZ$srtʁHbŽoBmn֩&Ōodq̡ ʒIҴdÍ{*/kqmwh_F1mߣ!cl`;44R TnO×Sos)/~#< uxI+dښ-mi!BK{oIYφ}c] d:\ӟ:JFo=W*wbȥ'nM {$!.Udz^ucdڭ*1Se 5MUhvCB.\D+ɔpB 26N2e,a:cם*7ה&M.[+"J$z!J$z(|%UхeAĂJ\LN8/1_eXzA{iٓZ?r,ІZ2T/^~0 d- RQ>',Q d*M5F<~`M&g4x}H!oB> E}~R" [S~t)XIu8t)x i ޯ\ ]JQS:nlq0Or)m9؃R`K$km~_F|bR^WUWy8kչdNUBepv,\Ip&nRt鮝TvȺ[P5zvP _p`cn6???tg~rշC~h}J_cF>̖3~9h0$-bi"2 GL+fZ0 Q@*<QM 'rJ`0֥L*5JEL% չ2/u(ϽP d)jt4gwנQK K+f,(")yy1)X2aAsI;  zݹ ّJ?N#J ka}ɭm.BU-N^(b:4c/sax'lݱJ?ln-A#sI.&ydx3vٵ^my-0Y7ӛ<`~Ch˧;߄>NB d|߇yWs4p}aRs!78!4n>Esz%B0p[IKyCGǼ)NobY5bҔcT ȌpR4iSӀhH B/ Do?jr/lN9\rdaEȄ"Ê|!ވ$b$:)>PL1fo: 7yׂ3P4q܀`{ .1`@*t;uK ׸ީ4b17\oM[; h3{VbzQzeOuǏGݼN6V'ؖlzOw}8rPʦ/ihc 78gJɂѓp2!mgNsO/s) qR&XXeRl.@*(nUKrx̃&/t0IZaƫϖ#鋈_[X yvYʤ~ŵ޳ ZF.%Sϋ/HpK<“] FH4vLq5krs5Us5^^fSYZDKRB8'9? {P~HxNu_WT4]jpȳ;NODwHjИb}2T`!qq-tD(0BJpOJJSQȣzIHϪ$hI:T[mR`1i,C,uLI 5Kj-H&Op܈9Muq/(^PĽ".I'+?o8hΌtjP,eO)Fjn2z,,vHjM ^CW"̸Dzf,EwWǓ%#J+HK~ZA88(a# A˰šyF+ʼnb3G) ΅lUeuvG9*9b 1)aI1) ;ks&\^]p1R^-`onʫ {̾uΊ_}J`3o'0K6{5fljy1. w}jPfwjgnyچ DZ6 6SՌ> f7~RFf;0ՔWJ]TfH3=8o4Z65eRC4i>>Ay;Zd=RN4P߼X8CwyAvBCwU~}8,'3=[}2EQb/%<F9LûW5vpELjZH$jLD#`)(F RvfwiC/c,s >K Z,Z=U"9•H]: ^]I|{JCAWeGf'k+Tkک 88B;Q ksټIXݧ!ȧ)Aak݉6PB.EACntޮ-/ BP,}=Jآ@z:BUw;%]&YV.%|N*=?iŲzhD{%RbExӦʔPAXA$`L|'6f(Vq>DAlKNnNm _Xm߼tIk1ǎ,9 R5w:9q]^Tds?Bةv@)Nim\. JCss IP\C!B9Ĥ)tpUӤz&vEcH_):XR\u>r}Ӽj4Gjڸ"He'ܯ\J~zR(F^^~ G"aie$bt>BIF4:fBV(FL EC,(EhHj_,=p*:Vkq'j!a5^Jjh8T WO^qٌ_gIu$,bj}eymJHeOV7wf흖4qlRz$;@V^4%QJgK:<];ܫ@Nti/fW8UpɡdĈ23/BV+Ot's/n:8X]D\! EX&5m{!e5ixP P} # @ъ)^Y$ZuÛ-*j]9p k4zv~>Fy -[:xJJQ~;s2( Y$ QLbLhf O4z8Gē4ʁVJ_]TWi19טD$~秏ͅմy;%c:1:5}@<UVt6圖$ z+ajIʹn"N2.~]9$)6ڗPJ/H",NL*Ajا:Rö` tYR E +4ir^5hA 'h^>8&K`"WPr0ftmV_FƮINFyX*ߦ gyJKqG.-q6XUTa%cGĩ 4dB !H#C6@2t&sD hiv8e+^xjGEWji R}ubPޱoM PP&Lp%2WJKt*N N3T133K2SSQ3 :f%ĥ-+]Mj)"F3cUOHOmNX~Phu͋;VnQ+3&SҎ VhQ+Z+}=냢tJץݢ(dp) գPCUN_Sc?M}FhMtt U=\JT+ZqQ<7KTp݉-42 pjoM7=W8=JSE`}$b$2hX${ؕWlb{z ]w]GK,3Aj=c'FC A-p7`LS9!9?A'"g8SȞ(݅Qt6okJq)B!^nK45<~Ճ$Km}[$|+q?siJ;]Xb@7wu?OVfBdQ[s.[=j] s1ɧ#2f{m䉔Dd%F1dA(J m8MEt ` —F,(IV#XΉ"B JləZe X%5jUitzj{A!eib\)2ph])>2? b IdJR`;*4g$ьybRs:z ƗPfly( &>kxiZ0bQ<0R`GZVss;.~ha6۲]E:[|JV94HOɉ|T/>J:w7a`[o6Vo8ɃT X7xU)h7ƿj/[}7mx&Ro[nEٱ| ( 䛻r>sjDt{:D-UY  zw/(1ڳ,l sp,z6 s؅oq's2g]eq9סt. P )OU3؂Ȧ/5ݍW1^24AcP43}oI^W(p)Vph`Rᡉd"uZ,qv:>|ߛ?|_˰N=b: _}@s\ή'QIxfAϗB3m Vsd+'_*@Ԝ]Wy2P6ߪs) zc1IgJ` LVR<i3Wgm8/inK6Kw޵5q#٭pmC󰮸8/)f)K]xR҈sЦ+q"q8h|. t_=ɍ-ngO➝.lMG8o#1A\={k.Lmj߿pV a~. ҹw&wo`'̕Dq2eːkY" cBc -|]&h^K-hl ٦|GLܗKJr%<51BCD?O"ccB#؀c=O.Ez_A[l@ $=bvC.n ׸&!M?hy(XaxW7i x9m(u0'*NG:싎Vx-i_K gfoʠ\^Ik'6%!O'bo|*O?NMӎq?-3[іհDknWONh7!,kV̻}?ߝN`Qe|~>2puq^* QEjxzJr$:r 0@ ~{76lI(Ȏ& N4hH4;t^sk;tИ$U=+КqޥUAIڸZrHJ֦־ܭSUi,U S`N@ƨ!Wf$5?~#UUr4jeLi.GU1 N1B+{qYD-Lf"H =I|y;nAWQK͛>jqmk'. b W.2YWH*g2AE֡G36 kGTW2bwa$\>=8dF),ow@j(b` 0 #`MWݦ:Jhꭦ%Vc1YVk+%0( ,D|Z WWoil L$;DqN^A0iOKBP~M<}#Au~z`4Wp\ֹܹU]JP9&%sxU 2 9^YVVGժ hMuQ"ݕ!d"TrJ%ZrNF_gSY?|r@Dv[F{OƘ!4>υrJxpEͷe\iE]VA[ 9KU):R T: Cyn\8%ubcr GH1ܹkM2uf1:wèծieϾhg%%P= YeU]4U~~i>ԛ4Uꅨ2$w,\' 1N*\:+ƹg/!"J! 
Y}A|<-u,`-֪9]ΩJIci0Fe*xSiE}̅u6D#鼄\N:bN{=XP;#'D(wE Nmݻ">>P6A[TQuйR*+(c*ZnjҹE&;Bs@d%DvkQ\ Y=|TŷZi6$'uz{s^~@H;vp^spuTCǪӦK3Ϸ$pv(=ύ0is/qwĈ"`AC;!7mC%X,COjݻ6f>;V\1CeҼ?.]BN4YLCkFP318}0[*nV5游eloNkδa 3f<3B.`I*j2ЄR t6M5I$:)/xT!%H<~ )kW#fHR9RhW쨸1n4]\bchD3s9RxƷ@p멙+0ߟyhW-KM?.qfZNVJ: 5=q2Iv/uoOkқ{,t\,V%ecO~Mn5'3{r2kݮ|̵:8 wR3w_QO.C`ο٥=_{ߖy{x w$##KJఏB9P%8 nl>+pzq:uigp^;dC:7MULӎi8v.5gl0a7)㋞"Fv|=MMw]b2ut-0Pz,EOwmzwwmYjƒWDy 1*癥9vM%amia2[oܰ.~CVxp404d&D?2miwT O{%J1=zߞ{c[1GOہ㈮{& FX=v8?BdFyt0DmiwT O{%F$mUEvV&==&YC0 mnߧW(|,jCggu[&=8:zG nOH4%OA]<8^;czĀ$Z)3$SJf=]̧%Og!FIo'7ۋάJ^8(ɹ0>9YΩ¿ 9DM<+pe/SpjE;=ɳ4/^zFL̂4F5}.:O9w>-׮Χ}޲$?wfIJJ%Rfyf$Ca_fv%.ył]- ZZ*"Xʔcz,Nmnk쵀c{תsڌJD )h"z(617ƍyvs, Tąd 97ܻBt̉|4㈣JS޵"}FoѕJ]{O\\ȬyX$‡B9V!^uZuΌhH5ښ^9*9˔."Z)P՛*g~X^T[q=3096<9GUI ܨjV|D]X/jsr\*QN5w8PE˷)43$T{fI#b"8%rQ*6aK߿|J#%Q x.2\/# kДeh;x&9EKcD5|ȅO:".Z?חD4_Ab-QjOT[! IHbFZo]DH8"AsQ"  quvI$`ǟwo|xEZAyw#ȅUqr@Win hщP驠FQskpQ@D<136,/fڢ3(: C[>4s_^>5_ܭxx?]^~uҬgB?W[wnVP|}n UOߞ\#RMq7E?߹_Bq;6QTi:sqX%bVC9Vc q!ESm&L_Z\0WJ "J%SPls׮'XqFu"[ٺIKҷJdŞ(SijYzA+. ET}:h6Uku@W-F5jU$5Wuq- *ޭ6BtYo,P!z"P4@6J|W-%r0PKv+o)L?g⢅>V^VS="KCfY\vBWv%Lc5\=K\i+4\. Yվ v+̋oF<\9{{Й퐆!bycd\F4I1{,tRZ#M5Y'3H@'p2秝aj'eq FfBw@N =ӑ% ˦Nb ߔ0S.|آEI"sV<8ily=^qZ?*%4!IE"b̈́Ka݆AԊ:[]9+σ/B>ZN٥]Ђw"JLr*t%%OL#@T X/p.O'4G& ;3w&l)Ыn(!C$缸zKj&hB+:1iUGB:kVIRJ5v=-mg|>M׏7k%^֦zJ2]r-@j2仇oP 0)Ac3բ(kHxgy-d4/=ϓX|T/ryvzuVHt?g{#+i'\]s7>0Au 'r`t:9WXAxT) L 5qaZ D#*J.z݊a{654Hm)}OLjDJF%0d*-9/np\6+BR#w :F}ȔkMaVwK0c 5 AZmr?VW y~H_V )*½x2τ(21k2hQ'6 yavM.7_Y.*_S{~0YA/l/8bgqy`6%<9 -ɢn-vZ`WbKx+: Ik<ӾƻbEXn|c7d#}{䰾q3F;Mi=yx2/ <2CgauNs-iF|F);껫*9hkC\乛xk۳*W&<(5F;p͌܉n;(qΜet#A @ʒk 6.tú eIttKqF9:b2P YsɤBpcU ݃s#g^V JMWaY kR6w1p\Ip)[@ {f^çŔ`n](x*f!{a%ۨwJՃ YYF+3䛩{B^ x ;}T΁iB1 0+- #h >9cEM5 /3DuHUN!< )hסXeE*^q$*^QWUZ7RUKZ3i=/Z?ѹMAZ zJAm"NL1%HmZB4(zmGѵ Ow.n4T۰D$ ̘" %9,Fa% 5i?4I]Zy ˃S6y ӡ3N$ 1?sZ-$H4ͅN U<)3e{P4PQ8τRlN^ NpcdN:)8PH䐻^Iz /qttp=8nu$} *a{`#zU{%vOl2r!DŴʭA3n+3Pa) 8CF1B[B3[wVk#ںuL7nB0GL4R29Jhsʌf[c(e@)O5Xds[k&ޡ@ jooZQqLuxZvz̓*QX]0/ >I94>jԟ!:͸ۢB3&fQ8su}d 3؎,!u r`g?92eu3 OS_Xÿ/&T.'Nd(Q i+W$~u"`R1Q6XML9̺%4ֺ5!\Et* @j׺-Imu;⮘JݒkݚАW):%i֍p[*1Fv3_vfKGZ&4䕫NE yk4$FY]u *5Um>]ɹMV< (lP-IjIcDHWBrcuq1لiqI% D\]\x4Hy H5w,g b Xd;CSGG g$ j)<(Z[Jf?ck̗a˖<-^"V VVSn5k .*d4,=+ͭz#)5ƊQF)6@*(@KJB<ǼKCB),z`o9}Eaȣ/! -po>T%OPuG_B[p)= + \Q>~e|Xm[0o~&Ԍ■~NGWz(0j؅jwojСzá+P[vxur4^gY͙h7K \U/ $4eȦMqľ$j7mBiHU4#Lzʢ3 Rfљd/N&%遐ɟ~nW+ Kq*gw^R_+o5oOp/܍Vj ˑpF:)T3!D0К() Juq\"aVjCejd+ێ)[06ɍ?]&PF !4`_\vf e~ҿXa~z8\5|g5=e~Q3˶ id\}!N 5]mcLh+%櫁CNz"vU&q=D QnYG/fg&9QxWg)h\<|vkh>q@iYS;/ƞӂuDRy Dp_ OͿ]>rS"B> 0HpD{.? {^>*N!ER2۞4Rzd Ԝ+YU@-0N: }y%AAmD }޾l7w-RTokA iU'`ZX ҋIjJ#B[=8F6ЄslPFMYja8"8g2h(Dpv |Zg WsL6׌r4&ׇ1ӡd(tDp^FuzTP-ECcq#7ӭkv5:/WE=!4)BZôKB_T:%L&=%ҨR;@;F!ٍgFێ=ta ]? s՗!QWخ~)ykkJ+LNwGdU г:V΄τuǘ !iމWS/&'E$ݭӝn4[OwSR;٪&|H2%mZV;Ov/.t,? `7*˜>a# )bK D(G'CwzҚkX&aG4;-Ggxl a˜@_k\D.g`׍wl ^D:^|+$z]<ݹ˻ /aԧ9 _g< )=A.v}ńIUkKGd<|'*&R6~鞥Mp8G]Hx:iM\Ճ2xBiAp-VÄ^sȩ'G1&0r:DYPʕAp$es"JS.x`63u0_`|lmH]8[m@4{w"AZ`&'kNxU"!~/IFѼFy>]m-(A>~| |=Wܹ_S/42]pY}͝[0x~Eֻ?^[gf֦*Wp߀j&]nu_NO&NP"`g|lqxK$:¨]Q̨G Ml~+U6GO5cR2& *٢k,%BBZ&DE&+,Lbh2lt~ ᖾiQ /#j :XU K>?GDn7<85^:sLzmi[¤`qBR 782dσwl|򴵋 ϮA>}&?' 2NXx/c +u3.g;+.?0:Z{?b3wῖ=Ro)6 ;k췽 w#`%g'wQgbD0T#q… nШ 4R*6pH[fmK 5}D~rR OD ڵ9)0@[2Bd 1^VVdh]iozTqp6& j77d b@mA5 +B 7LOB횰/U&PttvG5l|/#Uh=o6'wyFF~[YkI@,bE{upr$8Sh:.Ɖ/^ kV.!JεLQgfB)Lh rf) Yds92;\hI*ZA*u*yr[y5MR\^`P";wl6kQfgs;-W2r_ٷon}&o7a,V0sc XLȈJ,n\* 1)ˡF@̼Be`ubp5:Uf#O}">ϟ>59ռhs0A緃In|K?p3 Y/ďY&>GR`@`͸¹1\́DFJK5Hį'DqKն$8 %h7g2y9$n4ٰ}]\VjCMsBRe Xe\3FQsk d8ϭDJ- (r)* '/7g !eyZ8ѳ=Vq݇w/a)ϟ_χ~߹%K=^@l._O΢Og>>M&nEOEͿߏgyX{woNn]mo#7+[I),=lg'g& HIےZb,-HnV?U*EϿ=} Jp.9 ;H'wwRN;&뤜N5^jtؖ*qɒ:ˡ&}ܝ_a9 Q^D! 
;D1TI/kOBs(\}V{95#(h^HB/-Sd*BZ8" ;JAM|ak3Q2`QI;S:fX7-dN(n9/ ˬ1"XNE'H2tbn5Jn~kB. Ѥ,ûk@Z(3|#yzWa-Ɲؔ~dJ0PSFͨ6\@A9-K۔aW: !A{kAK]HAؠdҍ36;;Nۭ'-な5v>FrǍAQg$WxO{ PDӱծ߅΂*}2Eh)ޠp$A;Ib`+Fu0Eșj_|h׾ " +ZRo BB/W?/LW wsɝ7#p 0P:L%?M wy&4yOsG(yۃ3p75Q˳6 ջo-ޑJ#%S_'͗`O͗jPIT^-ueΛ zAk/&v<(+" [#2HW[0H?ͥ7Шߐ>~8 ؜P$TЪI䳦@A*Foi?\RCǺE@y<]P~DY& :ӵNjL5%KkQ:9Lc]wEVְ9aT *\6mycg5MC=z(i*Mvg*hZ1t:1IMc^Ƒ7Kwf.eŤrMTR*߀|$vo2in@#-3[T߭-$ˍMiM$"ќь+F֢*z1/ݵ([ZTk`%O5nj,2r8~)"Y,A-^1p=|5 ~wo,(DZcRLQ.+ ]Ƈ?]/ |!> RՓiej0$wvW,xx>-n$+֘>#>a;dW|TM ӽlSzUiov&Ҝ1%@uqj^*`C_"~ۛPj8V9l #H{BCh! Ϗ;>ճ*z{ 1'v$Mek= &>myB=O=Gth Dsb"H*YeY?o#ϺTQF)_f?"ycn$ǻG̅$Fb狛k[<՟dGu%*JUYO.cZqnP8oyaU+!LXJ%rs|X_?/gbR2¶ ]C(YY<|(6S#]śflcf_-fRtI<ܶXmuh9>]b0Ma"?{TP/b뚵Ço77oKfė9:2Qq9 s9 Ga`}lN!6v41Ox߾u5Χ0bW*ITsؒ8?V^Иլ颼[QڜP s~"u{trԊN"T2zg-vQ}L115 npԜ=Y+kF:ĵ=w0B 9[FQ]' CYp2 q$uL =єx &(-$ '3oS>c[IuªYjԠQ)`8렚:Zİ盝Au \ua͂=f4L8!WRɗVSVP/ӍI*֘4cSIsRܽ_$!z!b%1EHoQg;sNEUdG7 Jmϟ"nÚ_gIvgjÇKoٜ W߱w=~~!d Ћ# c׷EX FX8DЬ, ?uMpvNL{B~o%tG)\n$=SqJ[y8֘óz^ب $zF@*ԇ>46@ ת"W7 Ňu5B8QeEA}&3z+R]́NĔDT9F^׮=ZJy0ND>b&i5Dw1RI2Nr&VNP"P"Z"ɩ_A4ނ6O$fB8f"3ђDi-ю 5_g&uAglI@QOR I<}}CkMWI'hUVwnoAn)@׏6 D^kdZkU R;9wD] 1֨u9F%= 0 Dː匣{ s8EϬ.mwETӽ00؂`$Y?NM w374k;9t#"J2LUCˀF­!^[-Yl tɘ%(r"$|,s@JN$cc+ ns)OW:HRe"˙w< Ffkl܈% UdS;GSTo=NJWkFȇ5FsѧE^9oa?f]%z{_Ibc$f8FE_lNJG λO:6os`tQl Gn'šP3.tf#M&05UE%E9|}9WH@q6+w~>@^ɜN@WTL-RG}j9>G/s#jx9./4cZsCη wsB>ۂr=N7f@!d\"<ܝx(^ןm~A%&44R҄SS:H\ 4"+\*~dRMY$$9!NC-9G Ɲ~iL'pFj4B-Ґ ]`-$۱P+ }=;2g ԆRKe>hL&XfTY坰|{1OC,"(<.IO刺imViӔ{[7T n - he!z AT7gYHکZ7/K LAAIA1:Fè0V­'ٻ涍,WXzHUzHYN*3cWSF!s"$gjxI@jJ%6 9}.碜qr=\$=!Y8nr)4i=uf<ÎD")HLB'f&=X9koAHw$89=WJF,CMp*[b. XB.c3!N6,: 007^68n}~]׏`cIeu(},fw[\Ӝ,o.=D /ٵj'>boeLbx:^|lկ)\ T=]ŗ.uɞ{=07Y3&WX *_p=UX`~e"*!%.\ ԃFFϠV@8m,<+n T\}ᡁCF{߼aPԶ)|>QR&ߒ#qnUCH!A"dT KVY<22ɘ0&d^z &)Єr!K9SƲ4 d)J{1(&S㤱, 0)nT;.AYn~B~ǹBԿc! }0a~[zdvrիhPz5E12I:_9z3EɳK5ZL$KS: ˑJ%$ࡰ[icAS9gR 0:i3JRGbD &¡$P A<+eSˤQ"Bg%6cւ°k*a֞ͨdJ1.1NPi-+h\vd*K-5DȚS"Sx1x񾗍)\>ޏXQs ;)RC Y~Aa}ŠK,[xn*%D9s=LRC-,,0@烖S O>(Xb0|@4#)~%^E7.Ik$J?8)Xhsո2,'b)0,ZDXsjE( PThEU#4#~J+ dV+0?etpcJ|VS"h"W_?DJY'ff\/>A 1?75Z+hb|94Q%j&MloMica|9tp`A& ZgS%]5|J ySڻe#y7a)cU"pzdbBwtR(C3@0Wr.EsRT*D>ځ7g7?Յ*@xtq-Xap5e:Wǻ/<*~8Lr)Mu tV-Sojz6Mgi~ɍ2Y~>ż% Y+W.d<%TZfJ8W bD,ԌapиwkNRhqp3(̣19)8D 0}xԝK?1 LJؼJF`NA|hJLx~GiТHZD6CBfh$,,xQ5Su/nCu6h9!㨎f6L 5lgίo>)N:jqd= -`Tr0ڈᇥ,OvrqZϦZ-оH޻KG7?~p&,=![c&;77Q%;_ 6mYt%ߵAꮆXg'm27n*1)Ŵ3?; i=)߸*]*3j%!rߨc).mZu3%b\iE8m V]a `"?8z0>.& S6ū/fj2N^oZ=hNc4l'dфt5J 5ZAu(Kbڥ&C,Ɉ;EN {&*T@W`r B2֥iR#M2G_bR&V:VJR爔#S`I&@X%gܨLK"26p+)܀TLdN3B[) {=Y¸[Z|C@qOTm.h0Jx`ωbDN0&#X6kK8Vܔ,hyBnhgg&_C( UfD`ˀCV]mvHl%iIf1s#1 #OL*کVt' W#ˠ ֏bЗӄ0ng=#WA*M\'e8wS?.ჟrrZ^ :c9JOЋj?L }Wyn3ayb08'/~ºu>Z겵֓P>*''v8[^.έ&8)ë }YH%An7)vaa}Yr M `]!C.E]tr-x,Co:\Pzw;,^8}AFK]pTs&;h8ݫoj 8:Z_߻Ho5@q BXZ!O`-.k{?1dl7|z]{ŷG xeq37'arr-'֖ӓhK. 
Z{ =fp(*=]AjiBJgmOѝl{S`i.7;J\RjS=(owixzYnW=J]av5\I4ɮA(Wn [flB\_9@5!Cpg[SA= lYm~1e"".وC< `IEqu &p,z'3G폑mB8 'ڝۄ[~"G৛Rvјt:vJ~dk8#9ohzΥ )FRL8Npd.!I5IT7ĮTU۷\Ēmxm~OEeoU "%Nc朓J[Δa HB 1J":9piT\PV6;f/p@jD*̎ !f8eVQD$pqfc$3rU7ȹdrJnovnWdJ.1kypQ7EjԎ *O)fpG2) f2mF%S*ΌUֹa( %S"Hs6J`1YGb6iwDEI\gRfLlXqM]˨\ybR0z/ތ2XҸ 3i4jԖ\*/ě)u8G {g33-w [{{`j6xyP#vMhH6h1ڐ0q|!b^DtA"-XoV'h*<@1fUzwVRJ:?i=Su>_x5S窛W 8Z gG)cJ#8;wVVvJR&)[$?ֵЀ[r:0nK'%jBICbaMVǺ{gf!}sTSk:tfM4{ujuMNhlt$KD;DSz ;G)nm=#M#Zh Aϝ KщE$ hx,S۶#v 'y~vWz󐚡ŕ83 -KNRFKh%>ˠi@Ԟ!x)X ^jdx2}/9]r(G o:Ħ-ϯ/a6 ף͖?[X=N") nݜO;~)m9ktG-9ӒY[*ۘ`|Ԣus*%(%ݹeȉ$;(*+Zk7g'nqN3iyfe-6>nhL-8&;b[X KTqڋMx -HM&imRuaYNS-i8H\j?8bZn޻b7?~%bM'!%]+ڬz⊠\9͛T0L 3av</QJD8=]'<7ZǑ a:*%D ȟͿ9֥3[=C.nMN҅h'CBATaRH$X Drye6qvR>zljYW|!O,gO x͎K7I>.+dJ0]ju&PS@>:-|Kb'S^fIfIfIfuZo/?OKЖV*CQ#"`"RJu餱 +)KoGO~N-<UavOk%}*A1И/оE\ǻ _jLQa^ 8SUrj-S B [~QdZREYR=0# px5z(8/JBVJZPdI=-?,t㶒n2S YVaWUk@ؤE"`v.x YF/ (!(5GY(ޡJTh @; :.Y+B OCS0@vAB.mLnjO 㤘j ,- f8w6J XjZAktAE26`=a*(-JDtQ35aAPh]78\p6i)7kbK!ZCB ֏SGetRPʵcוɒ\+RnJGO՞ZE?DEwWC|{~$K^醞aBO]o'$j젳OV߹rŒ+x>]_m%AbۓOg.҆4F,we}!`] \Kgc&S+_SχO턇uH/"q>yrN"7)|[T?8駤uo慭-M:oo77o mZ~ "Wze(XPWVvzs]KFyEiN>Dh`:ӞЧ4n[mic鯝K96fp8[7^4#j?Qbkk:1C. _ǧL}7=BRjv,kv;ӫVy/VYcYU;&E@!l;x,q'9$/ ɡ= ǩ4 j\aΐaNbFw#Z [$4vr_o y*E e=D`zG*A}r1n+"wۻ^v>- q23O*b젂F[ڙ׎cVHe|,ijcnYZ/{231rq$LaYN%h*E0SS-ebZd'\3q##>3l0pv ab d]}6_ p_^g LwSk&tIoK/PJTLqcS|CdEX0ʘ+PHf !kXZеV@! P1x@@K+(UB) ^] G[. 4@oJŒ 5QmA5ңUjƄs$ BA2" %+-g)_ $#Њ0DLw*/Fԝ*J-rྔF!*,$rFths F%lx~ZD1^N /}jaIĶ4hE I&3d-$H2؊\C0-P=:SѣJ%Ֆx56l þ zMyzų.=ϴi@ #9Y@kY-,+jH,24t\N g,R!S,cez0Yd-)DZkqXkZR^0իݧPm1_K|9B [:̏qbO+?ӿ⪚'[Ե]JOPbc\X9__7h+|tCqY;'=vkA}Gvo⩪=ZO4Wu!!_ɔɔ)s*)1憴;iTRNnjAi-1iSNr#F\P5 z4 khpڙŒ8JWmJyiqDi+a)sPE9!5|]9 P-Wc+ 8|¯jl,5&բHםAQ`6Ѡе0aעCt0%ċԿ>FebTf)FebTfzBd޲T S.(UBqHgF $ 3̡ ۾U¶/[p)ME#`=-eӺ-7j%ÈY\ ~w?όXee;' CedˈIlV-кD3~ qQ;.) |ns.@ SSRm4EMa*)6J[j7sAiqB\˧WtLUs x.誀go1SY:ESY:EO+>Jo=G꒗{ EԒ9zъ&JKF4qoW|Zm_&UNڤ8pKXM֘qqjtCjt XhyA}E%W+›( ȴ̸҃ o8 DΌ:8UwX:4,xI `J .F8pё!'/1B;PYyo Q}ǽ(RIM3'1gEۈ2@lj:u`ʎu:"(ˤ=:_ T-ߵ jgSmo`j:H3GU WIZZ Er%FUp&_/kQ`ETK{l\Sx0B&J (\ LкEi#8RU h`zzR F"H{ "bTjQ8[,F (BH,hG(H R,r9mbm76kQRܻKklqKPL.k<^== wKgYLU? Z>?~R{`^ -яd/ܧ2>,џO?~{B|,g'bZ0[Ε/_%`~ w|"p m v-!F7 {I?diB-j:ʒ'ʖiب!vy|N7=n1ZZ!@lg4sӶAh.=v㊆q~wXv$w;=X,no߼˜ҿzԜ:.ޤ'Row9۫KRܧIiHW_[B Q~LS:}3ǷЧJs%:LxSr[n='h[{Bުsc+erj[sd8 r . ةr*FS$M %lkUs`U@uc4+ȤBGK˔xΔ*(/*T̟,ٙd{mMxD@eٞ-[3ِ׃ۻr3QhE 1$us8M85`u/rt;rƨE=Q :@ Wi+2-^wV?έȴ$LM8S^ IZk UhM,KK^)?p&~JI1XZr=鵉% ZX ֒U\hm8Zk&p۶dK9؀<$dU4ǟ3%cIyii)m+]}!k'q>f1._y\_~t< -z3.}qnu7uI*'y/%ЕۣR~W} ukAApv|z+D/WT<.͋*Q!90Ul$E˪N[Y4! 
̃tS;ЧR!ORM0#~_~*O@Pb0PkgfEbi~&QaDQR!Ki(˜㭦\]uc>.WB-'Ǡ$%#$!#!`+d "KJ,t9b2̋ϰ&*$#?R9驌ZG4"g.+΃]kD#XPA & 3@(LftjTT'NXZۛ,<%fZR&G-`֕zUjI*Б (n:A ı (whel9̂՜ҩk TӁxANX N/%fa #](V5GB((52q< ǠYj#_B~!dYdPD=\Z\ I'*t8%s.q )9($G_b)?g=$l>8 .9 ֏G?Q%t;frmocFaf#.Jס\6KTC^"8LJGhBWAV~t˚I$D%J 4dU$Gɬ|=rG#B_\9nqk6Zy!\ _p* -a{}y}'~yv' VAp}v\MNS耦HL5E5A C CK :`5uUqMt$MzzbCwɰ!Jz(/#=ur~ڒ ML[~F][O>4fkSQ]M/~II#̦[?=Q>frS~JOjm{ݳoUfdQ[h#u@X}BΕv-J"uJR#NhN)SƒK`x8% ABZ'XKCa)ܺd{ҹyth c/ASAӹzKяε'8Z3ʙ W^aa{{sA1!嘌r;(fCDSպ7xcy.YQ|Ԕh/wBK͕z!ɇK-$]a#b0S]A6٧m T)z㸗_kM# CzY?߻<@/nc:|/|^{P2L9c)t?zR XK:!5bln;?ʧ :ZY?f<O|8Y%JyzP>{s{o'.I4\l.\j1v7cl!CBAuP={; c*|9oA,0 )YĞ߽@ɜ{߷~#0f=fhKG K$;\:^b;E:|n~eu}E vo_>{_ͫߧx,jwy˜blȦXYV BŏH0ҬXܰS'C}@96 {mcb" eoa:]tUNWi&bDUp9F3 SHQGi`F*!,(J?YaSJZ== (}чM !Cl+,mR_vz|k|a+/աdt ``(]h,~A7a^sfA\(2k0h$tИ"4G*L5n[UR-@pO5\" e,A~BV$F 8򨐎0jq[59P;;)bv>LZlEI8D9nYN!R[0_"hң_fqܘOaymrzwsvެ]?oy;jH&.p2* Ut mԡ1/44ڡ=[īҚLg&{&klkXvoxt!'`xU1* rNdCpnU2(Bґx><㩮:vm 0*0Sw0W!mǫPc &ohP^fTe8pdTFz찶U)>DGB>SNȂ*ebA^^zz+} |s.06 0b2`Jbcxށ]1{Ü lʬs~bztĄm75K+/TujN݀DPD?RQ;NU=jD>OcS }LQ%j vMXgnQ$0L))T% {@tI@ݍ=B,ّSA:*I4`:%XVrPIK;vEFܓt*}vKPFN=/yЃKWVґ8Yk! -8yQة] E^,\oۂNd`d_pzұo-_%n{g<&hzDf =C%Z$d$,ݷVp*e+dW>bᚨl8KC8yE1Cc|1Zb|<$Є6c=}+Is2!Z=meH.A#][lHL@Z9'Ar^<jRt"O4e]ւ=T .kXgBJ__cΆDgߧq CggJ q>FdjC+0 ]_b̘'ER2%LIu艥 k&M15T@]0;$%R[?s\DĻJۍ]_? _Qvsfvw(#'$YLTC9`ɘ \6*I0g{^8|}xSCrFE5 /&xʍƑ Q36LJ ][uO~~$Nj-<_BHq:$Zi1!B;~UWɎ_x?Px0Q,)OV#ǭR(,0#REل/*"/}P0fS r<ҔpQO%j=mrH}f Pu>K3/! 8Cծp4Re8ߊ8v`q%9F9Fs`&{,k*Sq (0d3HcJyeUJkia`6DH;KS*+g_,bat{2HEX&֎’‘68KXp)*jyz> {^X4zڱ'+ #T>*0c6FlFM)*0'?x*CB22%h`<_Jcv?m$(Y? u}ܕo?>|#Y ı~|ߵXaV+7 LpizL|O_Woq{Їgb4~nӟI=%@o=<} fv#Ư6~n=”$ G947eWzV)"`+2,,fM݇ڜϵNl)LZƫ_8s; sFʥm`Jۓ a1taXQZPYMcMj<<=~C/m !xr,rObT*CK9FWwɕFH2XYqUQ~3#ziQݠAKO@p& pq"*,QFb+4-pT*2bD{ŜĺJ ;}czYaNE4 E@׈e3+A;"֑AՊ 9F"(һĭ+ąoc*Խ,Ҽ=]%pL/a(5:*r˹$ (-62H #y*w4JWR $4z7r4)&)&G -w4[8iTtAჇ"ݞ)G^,Xk5E6J0Bkh)K$u,{ RktΰeBS>kV$Y%BiuI[3xr[$M155A%aզx>(ae0$4ٻ6ndWXz9ge/l%uyIJ`,V(J!)Vi )ixC`.a_7FンB F:+Y"ƹHN --m/ЖeZ:k]< G0C0Rp"E[K.!$u=\XP=\H up'qOKoI]گ1TAkVj=7VdA[E"@Q!j}u bxF9 ŞqDITBX De! 9Jj@@սAJ=A@ ˍs%oe͠@~CǾ ܰL՝krFf,N2eT=j*FO{BMKv4l"BT^zunVqO~ʀ0>,/r9SvU*}}Ke8J)~Ӊ=jb(hҧRRtAi^ lVRvz8G{qO'wAs%qV&-q@Btx 4Ӭ0c]I*u1$ϵ/Cb:*fBަ@)ux%A=$e𛯿 5ռ+K D[w~YiN]&rT4#S=T 29u35Ԃ,9e#hj{Y]r_eĵfܻiR3?OJ$z۹Ka+`$_>_4h[\S}"P\qL}}M މ@5"OGs|2OM>*!U Zejn&C0=d.ސnGC x$ҒG Wbbl<)ovkW {fƿiRq5Ut>JV0wq5iMQH˨9igJ3΋S54\KTw«& m$WiwWzg=w2֙]YK \K!o`>5qhXruMpqSQ6ЖFikjε^EN[!E=bmmu-zQduLj}z̈zs##vF18G" Z>~߹_||!%:XCRT[slcnt /FcN JTGci~Lǟt&7G+ NV~lW5'ZGiV!$vkZ=}'G7K@Jiҡ>p^):#Ê\;й  a] Sy$]O)9qءoH4JK[ faj!9Ju_^@hCX{}((LV1ٲ*[Ņ\@5ɭ!B tZmRhQYKly.Ϝh+)JA(i-EZU ~P+0$S s,f-RhƑROW(8 )I3+QczAgʨUb1y+E8*6zH  ꗆ#0 +֊S- lnP;Czn#7}oRp]}@,IK*TNnk"ĻXiOZaP: 'c5o(!NʆaꍤE/{I8(pՇNG281J*ǹKw+\DϞ+C{U̽/N{c{AeczIW.c$J6ߞ{F 1[ .J׋^Հ,qg5EΑW.&:s$9姥6ZjQc1uv]߹Z`w>nԨތn3_;~Fͳ%1r ʏdrq)dp~`$UX+}6H9)R&%YrJYef#SYL'HImY4JQvTM4̙'Mu0''n/G_wԜTfvMz1;yոB1Ή3X10F.W-UVܦ O8hXc|=`WXt C4Xу-^Ħ/(pA~Mr@˒R ( s5xT\QCH{Րb)yN N.P/<Yuv9@ >h쫌M+üK=+` !N$R+v™KB%, %ET 7I FA|Fo?XdGt Kc:Ho=gv%_}ˣ&Z!)n כo"'앇V=@Kۚyml sd&: . rp`zy{+זK<1k+2~#nkz͝+^Ǝ򄊒gn?k9QUGe q)%4=gQHq;&}Ym(.Id?VE+&+.+3 3 2,ϛJ|_w_. ZuOgnttyY8;Ϋ\2fDaȈ(@-C.sR[% ϱ. 1HHQFtA9䆏~\O|;8^,`<ž3/F a#W ,?;GkWyohG<>ݲ5ӝ& BFHKClef⬠CAD2- $(չs. 
wX R6Y^/b_e*lեkٝ(V3?wu>2[?8'Oś33do֯~m&>7w_Dt]· |H g`ZHvrLȪ($H%; Z]0Ȁ Sm,NA I؏,c*\-v_470JU8%Ϙ0BJ'q) (2BV ISHDž51Xj*(Up3r lﲅ?}Q~JD*;{|)e˧?x~Q6~oY;x1Fx\?|\~:y|#ޖUgjouܮql{A" KB53će ]xkɰ HaqV+C31 .Fwk(o.gL׿S9gvE}"$RڗȲ)2,RwȜ7f~ykgvl[p(D+BݍTR*s5?\,HIDmϖ0՝~ߓroGfk&{&?j0Qn 3򱁕X$Xr!4}|2}*4FaƥyU r$έS.-*sKQNB |ۧk0:iOV-9p,$!Hj%s|ocU&, ڬ,Z̲ !ƍxe8ġH`-;- n"\!ӿcH>cX,2ehvƲV]ӏLЉ$n{3ׄ^dn| +ہfà k1`| ~oʠ6\ 7F5W%FAJD'y֦o 73hZ!hm?n*/|wή4,Y.Gfrԉq942=?Z$Ó!ѶFbZUn=ֹHyO$V$2)S剀袨SN `[_R :@NWX}Nn:HǎgE[1q^0t3iaiᗕuaj#,H '2!3xwOƺP/xK\ynWJqOrY0\Q0`05YAgL[Ƶ* 3y--6۴eq, GYRU1hu:E_9 gDmr:E%qi#R=uK;wG@J1LK|']2+8-5೛~)1PVuy$+r#?/{%kDžgxw1+ Wno|&i~zMA).n*=ľc%[x~Gd̂tJ+7O]~+anlߏ+!,Eg|.ee=ru/r)Ex7w tbĻ2%*Fn n),䕛6%q&GHkk_\fIDa,(P!9ܒk]2+_|˜|U̹J5A Iw*N>Y4RbaL%* D1ʙE0`싢c_~km&bb+; bnP~b@1'j"b҆(c !CƐA"760%M +Pf3sʜ@=G#/6?# TT}s&v!PB`e uL1W ,) XR-.ז 4x?0O _|2:a=XzBħ0"2" R`P6}@0!%Z+4JZE21{o!ސj{9 {︋j)CՍxB^,8HΪSYhDIBp=J LJC0+.00ҲQ{*hY=yH2SF`L@/nc6ߧ>mՍ\MQRnʬ"[*=[pܘ4̝IDt&B섑0mPYOcB Pp:F+#Le F'$!dMhl|KHA(¥bA^و)á2- 0[-:BZTL.vjl@n "M 0Twf`w,S3w,1ۍ]5Zd=ì%hWhl : d>Lai˒3@NNO_@ " ΄Cy"p%'T<Ζ3&$4H"&49ϟׂO&d?w#s` qͅF4<;R%+{_*7K0 ^ 'yZ,jzRj.XiӴzf|މ.>.>>j`>?v30wW$Tp?#+j~sw]T&ps%@4yݖBblI39Zܾ{^@[Cl>×٪[>4{W0"S7oZr|柮gԚ[F zѾ!] OF(fB&ȍ{ _M(%ӓYAJ_&;he̤= 3E-HFޤ d8o Y̩ƹ3liqLY,;O1VtRX: eFJV(|JyDҔɂOijljbrB s92Ggh zY֑BKkgq.z1s8ܳ,MPE/{,H&,mlIHloDzc7s _O{䗟hW7_<) JW tF{l} -}fiWM&؆"rIcH*DaԠ9)nל[ ](Т.P%jf-Ch&H me\otY&nwHjӺkF8;=Kn9Wk+*FE'1'@gGRPG5lFgF8?u>zyǾ9!?}~1cAJ=U R_t6Q?Y;3OsRǧ9)YIyKQV =6HsW zzt&ˍ8)!*'RgFg]#? !;O}Abǹ9>l°H͔Kx,NpyΞVvmޣ'PK{4 ȣ1GO47z1{;$ Ȱ蠫Ѓ 2^G y JjJ4w)!` ź117\c.{˶1VzSc,m><33:-n9E͍W2-EUv1"/IRJf]Y`5!J]Q߬r%_IXd;]fإK#WKy( ,hOj4VyAhTe5]HSr W]s>dӑ`g3(YLXn1Cs׭V#FĒtcJ#FQY QE]Tf@E:rC@M D޷|NYA5TP0T Z&4iTIN+ )^C_Ҥv/3qȤBٱQN@ 9tho9u@]xHK=쬜z,-U5%2Y8uϫˣ5t֠2) &K Fq0F2%:d, -P Őc;<%ZZ,&ݏtGQbƁGFi- !@7%mJ+e^Ǥ4ZлCvBEGk1X{襒mOR&QV7 ӝ$JrNQ+(Hy#1r.@t2x1~c{; !!F&szr5WҮE,^Բ%-0O{zz'wA)@.r-TAM2T *U#[ݻÉՍ!evmWL5GQ7.i ^^ 87j'mDzʄL_|xKY7H{D{j EW_]//B\xn/>d<ݟW۫kl1}O$Wws#unC&x)n?<{Cm 26/|E4R_YӊޒMq&Y}wZx}5ZLݶ4u"DE)y!JyP1پ0hU(K *#9@Dm's8Cvl-$H>ݲ=uB)x(IB,n LޜQȐvT+mS7zZx]d> њ:%:ib/LKY1qYijiQ鵎P#;_{KmyBY0' \{O-J{Jc Y{Y&x:f}HVijDs}]RfnjoNgVJȜT*!߯l9TF: 5SVKA}ʯ^\N:SK4\CY^9]@IWT TZ.ϲ>[~z3JgYX9^Az{]zzt&NsТtҍӬ'n&2kIzGga2) aCDM&Xw€L[=ݩzZrrJ,ܷ|kYxYxYlQ,Z=8:(y`'Q M(WI.݈ݨt ~p͐HԨAݯƎz a+S\ΣF&ϫ; -K+b`֢![J#&h_%CwA4* br3*Ltn*сԆ\*HK'u!|!*V%+|#EDv*p[:FY!l)HS؋\n%nF5򺳔,,N"⎙m- ̔!"2Y!~/̖&x}0q,! wI.#pl.pC'd 6.k)ϫI\y5WڲҮ.Db)cf'5$!+%w)`vLqBQyE{%7(Gր(kZVI^%?1Qc#uٷ=#'̀HīvZɤ{/?܍/Fwˈ`EV9WȕӬdAƘ>}Nlyӑ-cҿ ˧[r vґ-jr|$G4#G^1T/U~v,WH$p >\9XlOsCfqH,V0,qtRi2C3aܵnegbtWl_"xн{Հ+ '5,7hzQ>+kd=Sռ~ I!͹T % : *DĮ^up&AC6_"1G?:LkĈ $aB,+!;X"%KV~[&VX۷z9.'f_gU*j3Z1>V J(~T7 HuK%25J?;oO蒶;wݏ + O3 zgAw}g0oan{?{FrB_.{9R~ nx7%*pm_U)rD I]H4Uu=Ba4VCjF1@=JƆV-)2T ĭsM!xP`@(Gq !pԙX?͇v^l4H%bF`G Q&JKU+Ƹ)% 7 WJl\4VOW-"~i/10K@¿woMٿ;7v6+*heK ]5&NN sDGCI|>TH__firӆ//Z| x+ |60QVv~BcIqt敜LY< ߣzp;i,`ʏd\\^}e2eջ/2&xi5>|L^ J>7:<ޏ.ݡ:X ~Y(K`A'Hϲw1׍O37Dž3.ǾņlJ05|J=b^N& I0y9CN r 1dzPKЋ b7O鬺'=kϵk_8c{nM+ -g&H[p8m?wt&ŀ7+.^.6?}t^6GרtQ{&ٍɏF No]TWpT(8:^t) fɍ&AقPX!D0 $4ӌY8{Yhn '7dlCf))Xy; CuK& ,p)y`E2╰R gb%k*s= +ʨx R!6!\y! 
GBVZZ4Qm0 }kѝ{vC55#oOD dǫD>֔&IT$LBi'`A N4Lvgr`ZiAz s[?b;iŝ6J_{W3R˖k1LiC)2H8c ,1T:EIǞլ jc">H*"҂h(TGij-1JFJO6#;;Mai+$}}Q2,,  +< m967]@B ^c Q1niz"OZ[W0k1pa`4 b1bha |ZH 3Mltoܔ3auƍb~~Y-XnU&Yx_~x5(ߑwoQ{`^ ǰJsp}A "?/k% ^~.6fN¢wroHd!.5%TDa"̓2 uW{[y,ѬW k8 b\ }@zn9lvE!Nd$Cpqû{(#tΚ{&J{&4r$Vv1rÏ?6DD3Ųdp"A1Çc=6$ &ZV_$24ޓKommӇY>}X2@NdzɓJmo\2v9{s6*md(7fщ~z1(ׅXR+~D>hz6,Fa S2IZ?}sr ci"D5!ds"e3* | A7}XeI>Taa6(;pn(2S1g:C]m)P7×9N#(e-ș҆jSQ+d PV-Fl 3V`wjJ)6ve^R1`OfȞwZکg+uEEqBqbngyI\Xj9:Z8[uEP)89RGoenHLΦb"쬯lA ޤOdw̄o+~Cy eNhmT $[צ9v 7_efA=(3?piu"%Wp{TKvzD%g'.,!f9Q]w Q1L+>Ԕ!pbHQYݩᜑnk3U)LBIÙSH  :e 5֌X]1{j~dj?-UdT8a%Q*sb 3f hBTxp= X'֖eB`L~ەN!aM',Z7~$_+dU~dɤ ߔw#ˢOUNt)T${%~ERJ,R(Thw Sp'^ރؠ1YI3.RdR{G 0e/5eӢr qZޣT90nv:SakV /nҩq}`|W~,o[tO{;rPujQhJ-:Cl"/)(G?z3{;tߜ]ёu$~X^ÃKl (pkߍ~wգv|_`9of'^'&^ڸ=*u3DLEt:xkT:(&YX}yp#HˆF$p7+a( @\zoqIU{]ãGWw 4[)Ej]Kρ Dű#'hSMh&gkT\^V+ Dj%i5HP% 134\Lj}f]-b6fSYJP"ZA5 MZb=l9ϳym$eX{eoT |\r.+ uÄ+TX705hnj)6TOU{/&c?]_lKc<_wZ~Xls ā1lq(l .[cIyUMD?lG੪m kn\~M),+y z\OG7}!'/gS֗5V.;kt$!_fɔgh7BCb":hNOi-o$Qu!!_fTəɻMpu-}Gv'Tɣi=Dօ|"#S\]YnN;hSVGn][2cvһsu@iLe_Ϗ1!0AHZÖ¢6n&pTMsWe)n+z c.!Įs8ȩ9\=%9:vԐj`Gu8OXj*oA$)?EI~?Q-?"ÊVs8ty2~-%2/ M)e5^VQKz8ʔWXAPeeT_-6/3&ĈS6w5'lǾP=lB[9$mr A]RMH|~tBFԛB~q兣P 1DA>xYM^݌_(EWg7xyI%LIHB/&.`GM4Y4_sU}Nv)PxA0\YBh$9qeLPi`Or rjK)s Dr-tt4jhƘ=B9m+-N(ǶLzx  cFڒ#C1X**sk,'D]B XT-'ſ  jdY"`~Wh` 仂YRk ;MkMJ*(Wrr,5Z!#>~N$[uƳr'({)rPIJV&l߅>W}0SDgu+O >BNg/Sr<"qR7rN eH2K<vPTj[*:B4^*J ဖTAJaԻ0oj)N#= ,bX^2%B+G; U{sh?gPSN|\7/"ax [N#;0;FsCi/B(L;ǟ8Q*FseHZ>cy.+~:[<0O}5ZK;g/(^'N[H%'PTl3/ G拟MG1[cY|73 [!d>lcׁ7qnfR0,˗!>!C.h!j DBDZB^Gj?w|t׋VAB[}~>}~Q`!f8fkn5~AժW~ͅ&ْe2^_Oa1owntR(;K~xׇk#w}_{t!q"yyu =7B@)6)awgV՘RU[60OQk[*Fظ$曕uJ;| 0L( tUGa~wib;ь_3 @<_^xGf:>'3,, ?M:F, h3` [=ynٰ<҈ 7o=dE͆nrBOckNzȠA(ߺߕ`_ &8Se5W- @MQ?&E;4V>> Tjxk^-9IKюٜ)\tm.;uѻO㇇۱&/x춷%q#5rj=tm9F7XYwQZK i5XkWj P eʕp+5\(KpDm/d~O፸(NIx7F&xAxnxqHI5JWkZ# asl|`%-Ҵ!^H+7k9vHV#-"%jŜ%ֹ+{RbJA/ąa[FB8-AR JuޚC(/1g(0< o (N 5U0Jbok:W_URY!MwT* 41RIX&-'#22]?Mz8H6bYO).4 D;pe=[0?qPs+@R3*Fq FYPsXRRIQf1=hPA ~qj*B78vZp̵'ӍCX ~L9W;/&f!v?_~?z^ʺ?;?cϋm}Xل/>c]*޻W~Q0 fa8T :Xk- Vxml'3gZ,,ot,zOf*Ny]Q$uBݵn=ZtHn9IS:n_4ֺCyߧ8 '̻]˶z ˎw.k]HN ,G 9Sz9 eWL*M@u\>n%{.&sq!Ub^B6W? HaG!?Hl%Lg(%]_aP< f\T=$P޻jFQmDo*=&M8%Sq>HIMIcG RQ$VCz"g}5A -u3 gGQ<O>6cH$T -s @v8jEDW xBAvHW-18i6 S|jb/Sv/<䒺./+Mݽ_s /m)3oc-05_ ٻF,+_fvb_Ѽ=Fcl26XU)ͥ ত(0 dd97{ml-s]BzYx9iֺ( ל%B/0ecz*p mMySh_1x]356ԃu}2Ռu]cmN_W(+Dģ 9D_#V Ҕ^C\jTChUFjw-[ġ_[HwwՖw[Γ!\`RX8F4+P$äHp,ֆK{0|foFf2/s=,|wAȋś}u h-Kuҵz y:ARubobذqZ:r!5^%eZ6FL".DnT@el6/"B\gE (,ke5t nSYX[̓4>'`),j"*$,S:u>0`ɐZF%g" h(g6`-=TsFj-d%cBAOSr'PhnQy1̦`_Cfl6x-k2Uƫ`xЅf]Q`# !H‚3mha•{#4%Rp%97Q5B8S$#E4*h)sڥCY|F)JP#w^#7 ,HGn!$t(FȾ„^׾ƹTW$Pr0gSzT꫞FTbZN(4m\`„v*\ XҨŲ5)vvQ;z"|gJ'[m vZe‚ ֱdJi((" ȉ6]m΋).$f: 1\ HfNš;Bu)ml)t]wyGqz[:S997- \7kqAz^dwB*kn-0Ac$o Z%oe@-y3VRB;:L\fԼ<`Rw8Nl:v^nܒ8c)xyD#JGi`v$fEcBN*C7AI <%B kG0q{+aVy'ʼ )j Jz^ ?YךJ$sꋌ<YnQ"XLԗ.(12͝)G\R`-(a5#4т8o%ݑ() Y` FəE.TD0 4Z;Ճ=?,KRW) C)OW*F'7&GWr4b2zѧe+ 7sMaR bh\̙BWz!-HSΚC:Lf%]iHUsNd0~2=vSd15 kG|xGg!g!?ɂ-o~fӗV *?ga|qpKӺBbuhqyx~f*{Ҿ_pOVbT*'?olh qDb=H Z1ݙQ]jai=k&^ bfW0^ʭ?Z9pҩܲ ;lvmߠ֡G nΰn[ '- LX8EM:C%0F؈En &bnG잿.wd}y|9mxFާ~6jv#&<OQ>_O_HaWJƭ'u(pEUZ!JpO#R;,D#%(qCFZJGKVRZ5s}&yWBA ^h{N`<瞻i`8\ lL{zq !]fRD&sRfژ-h#}nZAt2[dtcOaD`aUns*yMJ "^hԇE(:bEi^ľ_VwC~a/}Fwg11SqçP11g6$iJjd )O ;G8qE3|@Vo 5,S 1jqi\ Y Fwmk&nm$ 9dA N D {6.;K $63İDÜrm,eQ0kXRGjabxgU2%YSIR}:$kŒVr5qluB@ > ¥nw?X.d 8|/#/5宿qO80az7 ei2͜$ãOz>\T-Yu;3$< ɟqOa:Π.Ul?2;^eQ`肪\ьò336`3 LO6Ͻdϝޚ;MB7a-%Nk #*̈i̜ J?STqͳB93)FwnĐQF"JT7EPs%\Lw1c/أ e-&wϭ8^|VB:BUɭǟx(\4w;n]T`ܡbOQ'+/ŀݎ|"y߈k|lnJ,QExKeiQWbKh[q/{ H1Wו CJH1 8-n8Qu*N*E\H֏?wжLVq9 iŸ b6k&8at9+EDg1xfRD3QgB#wez &5rؚ-yʏbjk&eM \{fg."x{O$}J>)U ";.&3 F? 
)[6q]f-zTh]@%c!ʀ5"^5zzxS.EER]5x]v5D瓖& q wݠ‡$HQNjj\Y&-uqwH9ӯP'X`sSP+U <ȼc{=<\)<A׵~|5>%;QuU}MjpNw=)LK eL\򱙹O)&u0IR2TݣqjY}N6&lc?AQ^hɵmd?:`nñG#p$nZ$878.${}UY~ أcH$ gL"mD-BJTBɽ؜-ўEZ5?ܰhي?k!BlgN# -z~KEb kաNg`Q8 {G\$vFƼ,"`j7].Ldʆ[ޗp wK%*QuvJWr9\B's9Y̖UW&.MIa04vimh$ԒWr O \R8AZjW~^=o0Xne?իwzƲ+(J,Vn;R*Ԁ)C p+El)k. Y!W2eVS](ۑwT:tB%{W6yg,#3lH+Hs'>VL$Ҵ=1/bR8.C.=|zxp1`R e"*|fT =|(c!&D$pBx; PEbJSƒB U9:`v\kU?iub ji*2%tZ4gґHR_9u-EgbZnIqn[y9~mnXsr,w{4S<_ oF1r'6^uZ=i*IKsM VSjJ۝5cjSG*8+Q5R-%DwE*'ŹH LtqqNj ND] Sfuc˂b*c1]$(,+A4JLԒ#=!ji 1Bh:i Ҙ^M b,u΀Z;yNv=]:M35ץۥ-™>.`d^JB/VJ-fRZjXD)dm0, ՁDǸ9 c3dLd KšsovP 4E+A`d \_XnJ4Jh?^pW4$fX Oy?@]G$2@/`Z=%ͯ, ?qGn`1ƴЄn>|~# z5}1E<!m3E_~~߇)g2w`}a7"]vo`zebg5.E,a4,RTJևԟS)`)S$ KSpkx CUJB/BR}1S\˜.CA2q,c 96ڴ (E!LNzpϣSmSHc/#`Ԡm%//oxIɬ$cL %3Xa@*;;;hj<?P)K ӤC'"X08  QpQ þ'F!w"[ZnYH8m'h8'-hV{Sh+]KF1V0/.xB)HnB2a4SKlkҪ+[ eJ@iz 1~/FF-kf*EL JVXi53J$q`cƒc˃^휭' 禇 -! Ce1`_{Aj^Oa" $Sdm d3JAm]o#t|`Iy蟗.RC a7bu(EY/d)Xz7Sf'MyxOFq2Y\_k_W` $ǧy?P+˄NfߗH}dӧAoVkoOKi{} W'xi-I\jYJ̗z7fVczh@ypL+=G [ CˊH#U4\-C_}^N`5.@oFr!U%/ };4o@m̚D/Bؓ(|Ζ {.P}k˳IaFyZ뭻c7r([ ʳSd!@H7&g 8s#qZEQb`gH@)F(=* I,\uJWreS.ۢPZ2 jib߽p_hP '*{p/{3S}|'sr/!%`2]PYkբ^QqM!#l!;VJy76YP"zT. eYmao\ie%nj7F=LelԒ,zID(ZF%4l2ZeBaJ'y() ;Mi=}QV0}9R T)o)b9^4@yc^ͱ$+Wk31#~m}"6 ݭ4m|FQedL3ɢ`>p!30fF@ ՌjkАEtJʂbVH.1sIK"x'r, %ĐA#g*QeVgdh(ѹe,aJ"]RTA5fRWje–4}J 6 o)[&?í$l|0_wȤ!Jq}ܱGQ`@ƜRf:6ɒl8dg?)ufQB$E?[%Խzϋ̑x]k3ye9;vnGGqʧ ]*$>$3rn+_΍]fR>%Tmn8 }0 Rp|,Qk-s VRܩ¨zM3߼~.Q_Jn 4 M(j^"YYNt(@QCҫŻ}?5FV6EoBǥ6O4 SBkdޜtX?3oD81<|<54ᵕ5XrNr5.Kb O%aPte&',Tbiwe<gWiBPG%65橡vׂ@0SԬYo ܹ&#u2t=}c'uu_I]WWwֻR;σfC sL(F( ,11Q ֬E}P"mXQ6ąg7C@.*֭}"Gml|]N.@PɬjӸ# ]qA6Jr퍍,t766+/Mc|wlCzДڔ,w~{f *-7lQR)]vHwΛV$c*Q p2{cyS\ݿR;| I ɮ<@ yw2m3k0 ʡ1iΦO`|zڵ'॔Ƙ[EWXpx/@sdyURޓcЌk!tK]ak@ZP XMyxEZ*4WH zA24!$xdRJ S têC&@j=GdfnpřF;ݐݐs꼼aV'0xL)ON/DD6 ,Hmrhvsp˵g"NKS 凷kٟf!ޝd!JuȓQ2it Ý^8A}^_< P'+Iig+Rtj=39ȡpwE5RȮg5糬[甆2c7^$f!v5BΈ0 -ڳ `]hlqT1qCaEp_pK҃AWhkP84̺l-07{(i5Kv D&%sX—#y%g"Ӯ fIA4T Un߭S6ҷ:"V%RSrrq̧[%R>%s'~ѭ5sr)ގr?&icrn>@Ʋެ}hb YZhvBBp]zFM?enަžݜ;(J;&X(J>=,s՚5&-USӐ-+0$˥u=6UDJ)+"Qnj.g]YCJ2亷2UIn + L)QzL5&B4 UQJD{Y/!a]1Pvm3e\5MY M:͉fɛx#;}rsWλ|#WֈJQx,:x">ͿS/B.s֛GW,P>]Ϗw,zu?<"]3\NFG5#58ʅ4tUvTV/ 8޵q+b ,}{AN:FdIv,?ől&tL˞˷b}bɯʊPLc3+1vJ>W8Eݾ{_=t8A+!W$!mL% 2*3 yLy5$L%{^!VFV6eDL%uZ즅MmW@J;ݛwM̗!.DGkXsW|en(CrvoUa87M* -bTr1[v9PjqZl x @ZڤN&IDKI'@=/< }V# rl8rԍmKe-jmR!V=QPx rkA,bv R@٪ƴYh9ڹ -f2QH=vb<%yh1BԹ\ -$rٓ'sL9Gnh!8baҌrsza5l ר5[;hkPat 8b"dQAoDي7zwu%1`7ǘ7ưoD3X׽YO>xqcJp86xm MtM!>\n9x:1ݎEp.z1,7dt( -QZ(#; DTŶ@ Nېmʅ([6z)"M:jKBF Hb'fB*xY ʻh-'Jz~*#k'Gxl+i`[gf`f)jcn`OԶS<+(r3ur-7dԼMh3ϊPJPt=Uz,Wwa] [z^uGG%S0n ?!SM"} k''2#,kT&ɭ,?2_QtF4eo/ӟ⇿3gⷷow ( ^4gw/[?w?8ϘsdQ^I=#BzQ^0Fq˪mJ~̵~gRtZZWas WQTV +E*.[TUa1^c9J̪J‚]ۤ0XS'YiXi|}sBRnE~򗒬/.{;xtCgkzhhؖjjľl{|hgkr[=j >kLȁFf>B+;ׁW`({CRCQbNck)9FajR(@OntK讔L~V%\mh\M093̥~j&-#kdww.T ZHo*7Rj5d@%Nsn|8I)Uv%pJto΅'>I'D'L}Ŀ^g^ }ѫF+(-J" ~nS[hRtJ)#V1aI%mVJm%gD1R`$A$'o3iڀ9Ҩf XC'*#%NOTl鷷!gP-3u֓H:I$-cF1Si+Tzy!F܉:f=CfHqd@`~2{nz(XMSiiFUWC휿^*@2ݹ=Q4Za+iK]ePdgkR}k̹ήJBsع2հa9]A8z޾?u' nrf̈oz~@-W?@̩ T9Ue݋1[Q=j–;\M ݃-w8Ffhdo <ð~p &Urf2-Yksح}:mgg W;"VHܯpuuw{㮝\Y;slKѻ{ΟBX B:)~ytS&o0k5Q=ͥ}&1xfN! 
܏G}W0{mY_#, ay1DZRIIFgK7v1znZvּuN'vGr|WWAaS|cضUW;} Jˡ-fO0Lph7Pb{}TmL<])=;)B 3csO0ВS2 v:9#)9m_W'9=>3>/f|^k]49 > `l[%Emd% UNR4!9ҭ76eHQِg˽rWja+i"*5zx.K?'?{U珷wW[}ZJayEҥڊSymLa$KwI}*dJu/եIc.|g|Vh4;l)t40D@KFE@>oq-btOcwN~H#[(0[jAU_ G˅D\t{su٦o_}(f6V"Z Bp%۾@$7^p&$מm9W؉yv0ڂUky# $ouh@:3_3k/>bo+Q@ܺ(d|0yT(RC܀+Yxu<Y,Ǚay|YՇ?FU%?k]U'U O2ARefjnT+ThwiZŐOg2!&Gs8rE #VMT!"Y2/ S&1Pkɐ$X%ufAdȈJc#a{KbVO33!+;:iFEi9T;w6F -EID4B'4#fRNCH H!NsO;R(>wSMkE{>:7,^4#X,akMSgj[Iu-xpȂjRу{\:Z=˛ wEf9X׍TNT`Dۛ -vXȮyrrixfbJV2_RMRMBo*u3|0ݛ H~s}H.0N7PQ2(3՜#FH89'KS5<+8[!l@%@Yi;tId /凑5^d:)[b5#lښ0Oo bfF4Nm p6>s0_1sn)/jE%yl QڇHqpKb!ΆeBf6(KhI,謬ݚ60/xbvW0vGQ뎢s9QTTn@ez4à yRY1!-ϡ9B 3pi;B/țtmP3'+u3GrCrcy8Yǖf”(A|ِVmH!XmR)XT1QƆ̽1!{raU< a@Ւ#\6rJRU_;XY:J8g끩,M ӂGfO*ǁtėLl˕]ᰮuSNO}/Mk Eh"W/C+!p<@k@Qa/zsrW/o7=&t{4\on Wzds=NWBur:;tX8ku=Tը2!-oF?wݿz+_G{7{cY w-o|vdz =>ur[wsF_?) n{KP΂IӓF?_&mY[D/&(W˭˾Řeܹ%-|=ur>K }hz%L[k;3gbD_oBG..]d+tsu6.nٷTػwQwC|>1Pݚ! 7~J=JuQ-.>?-<k5*Bg"y."KrK-"_+47i3PrM~-._ɏ,|ug7.%b$ۋ&MWI fNM ao/V0;݊2Hv$O󅛒K:>iv|ueVAGٯ: bedfio}ke/WXM;&ùp3?  A4 Y"qպ ̮lej^= <-SZhFbJ^=6M%5}B}J .mU5Z땃6Gצ Gq/Ke8ʎ瓩[oaF:ay}'U'>:uUaAXUGOfZl 󰣓lzSep:´B84ɿyt3'H*]O: Ho?]R_ς0w+dJ2KP..5KpMܻOjQ$ =F^FC2*h,D%sPVS)Y31B#_dc.P ! XZ:ސx0f#RٿꝙϿ84͕+dJ.',^O7iOw/&Z(ͷ߈7?3?#OO0޴Se/?xzϯoJz{{qAveofv } j ְӓ w}ߋҮG3^ԃʻTY\.*2x(9O 'pS 8 iBZYmIw8(uW/:K^8EkKJXyT=V0wZ{7٧ą8HN % <29gْ *նj;aB]Ad1% 䬓F!yҒpC3sܣ!{P}[OtȞ1 lì jpdd㗐&G[zi jM%dF+yr,t׮1 kC*KN)_pH2R%ȋ9m S-8-LNur@mscv[FKp(wRR^iX|/݃jfW$6v%wo9Yѥ-7]~Emytd.W!ݩLY_)pY8dZY 9 q@&MFMBuT 1oyc>FU8=o++af]Kx6O9K:yRi{݂j0ȎYRU.!A:Rc!ܰ.>]n<^Qڌ&rk牢Yԛ>̵/5CRnjcjJ `jO?:c* Jz1<5ꮻ>ZSR~78%*9{cB=")#`2 `Cu]*}P馏pCn ^Ү X`9@/ $zk6.rCЭj %k+{ÝԬ}NQUJYmᢾUiwMϨKP`KF :~L;ZC!>8jT򉁤´4+<.Od3i]#;Oe)A\rjfה4taZxJڄZ(cgM28%m!fYdGWK)_pOVhĦud`:t9$y{Ǔ3kbp `^d^^$5m׃7͸RVZV 4 YwoZ/[eK%:>h϶Zjp[2L:~iU Tf|-~wzڇbФ\?~p;;1mcQQe\3q* nxNjdq="5#Voe6`R2iG;9ޒG8@>ZatfKo0pf}Kgj7Np&r,vzSzB}.ԛJ̈́Inܻ"ûwmq @ϲ*i`$@p^,x]lENf$E#؆eA힪b&^,Zۍb߲bE#O>( 5V:<pMl>-o84||~U+~\i*%Z`FOf|& >ʘ\GH흱Gd2-Ičx b%SņZ)]S}7v61lm&/eC'uDȺҩHT"q]vr̹t iL`rfcX)uR L"KP"xP K,[C%KσYɜvӹt1>w~sgj:$^cX^Q%j T^; %tAeJ'û`s-o81[N$)ͦ*c4{&XqYie/C= i`l:d8ez|-̦p:E%_L -j8ވͩ!pka#DJ<~Xg4rjk8GZZm kԸxaذ\@ 0@)YECٌc{OHyveqn[֜l&j9\]{<{;e5 L18|nZKS&eRjhyroDQut+ןqfkVm@֙<#c`W6\ld_% HI7Xvtk;Jxe4Qnκp];4]:ͮf݁rR[˰NX@6ʽnM87h;{mݚݓP^lCR/('vGA HCm ڹgH r3 lK 3+V(fN1œAr@߱mcDqldOLV&qu/o,hJ(=&ؖ&k=IAC=z j;͠ߖh5vKk ,2il~8cOB=9þҳ[ܾN?a̬ͻH} إFfR+y?I[);ֿ 2Q  VJ&Vӕ~,w7q-O$GĀ5Kze.&!|܁3޻(kBȆ7(<,rr}&cNoccl=cNѮ}/J'_LΎ'?em :󒵍nrDspmet$5Ð]5l]՛mR1Lo6D6*jDQsxMxvxLU ,, aŮ:;Ye)k}-p3nj-7}9-]m³i#Az{띯۪&x|Ɵ ؞g\;{^٥ꥺ\(6Oy2 ,DڪyL龼S<>Ly끹V:ō)Bԃv MS\ڹMNhzt@=)Cg%|JF&Cp2^lHJ8K3M>~4A+(Br/-W5SDJ%2EM.ѫFl=i7ȪߥUm(ϩ\4DȐ 9Rkr<[~f(8K'wY{tWGU^:Azx(ח*[EzETZ:!@hT{RfݣmqN==XzP`Y[wKM@gX[1|H;Y̻_ Z Ϗ}@Ƴ/:Y0<ݪ:ekqM2 ^] :x, {/eϙlX4pXrԵm픈}+G^os' {&dj6hŊ2ib+ru:&laz]G#Uja˴wL"I2e/x> c.8!څlWS0m^7<{35)_I6[Ug Pdy*.ʷa ca:DjU;900px6|VJ8J3*&w}w( ,>vB:2C2aQK ؐvHm _tra3fKUK42#,d>mnQ\lk8\nzyC[C[í1&^MT8qš%5?r=ڞGn Chݮ2|uHPml׈4g_dLL8h 'Flϸoϼ ;96^9`/k6f=})/S 2vU ̩͹F lSOf1%,Ar0]P:—li7J Q1:;Uub@&ܓq=DjͱR|5e1ϲJ,dTtYUTS$(9ևNN]QNd40DXA&"v%A@}uXjdjle\)m&Ak,+LiThR^Y 2 F3 Rɴ<y8i2Ůqo)8ӑрs%ȏ muí %ȇ j[X(0vf^y齇Z;%tf^)P9k'2 K& Wv{LN~fϯ["xIig1{MHϟaZӇOowd"5_u1z@YЇ]_o˳݅77x0CSObo]W.5؎zNg{{(QVΪ7yJL߲)T*݁r~[ɷpг&4L$㏏0mʌyPF"FXqEτٽ3u[홦>C*PZAO ىNzfs2#Sӛ8ul@Ѿ 'w43@fr溏TqeL;Uf@Ks'B*uʖJK1UK~zݮlgU\Gd qܔۻ>; Vl//eEZ]GqU!6~Yﺝ'7.ͻc/X˳LTv܉{ywwAJoxػߗpu~xw/]xĺN⣸ɯ_i}]1F-o$c^׺H2?ۣ9=ͷ3m5ec2"+rq$g$a hca#):*NdK⾾.='"䇊h-tI:dvj0-XKo`{с~B/|gmF}{+¶~1hbI/nF,$hmVkzv󦽙ݥH˱U^oۼ;ji1Ux)*N`l6ktGD5F.`RVBYr~< &22_va`ec@նBG S^1BY]?y|чW\r&uőAfx r+-\_ E 3ʹQ7( F =Ո#"QK! 
׷O0" }9!rHkNx +|?@6b #;HQ,Nӭ`Cԓ7dM߂r/d-FKԓDm#rBL8MO۠(%!%"C36Q(][lu ciAph^iXU9(@FӒTB!߶OtX9iφ09+%DWF QI(r))fe<̳'7s?ol[l] 㪣ȀzŻ#BY+rsY{?σqgTP,ФZh}|MMM'[={H=wi{O j?gTpr!~3wWG46xu7]Vj2$c[ ZJ`v\̍Y~5OfaN.Ùl+UY%f0\edZDD"pU dT&@F^w3wnj=-GtO&.sp›`ߋ٣ "xEڍ"ҁ}x{uKKl%,ƱwCBBVHcZ[k  `+[yiT ]s@RA]J)m`i7`2j%Lq 1W:Yya$/)rCYv[R.`s0V%3ܢ.WQP+JTUS?I;` j%#Сu͔HU~C]݈l n'(r#C0ˏPE)ǩ! 5pG;dTyRkOGo)w35=`gz3֕X` gp51+6v^Lg,\d׳[gB2DZ3yV`8RiV/V4vmÇ0Pž;f0c.;ӹWM !LrLX~ ۻMQEg.pʅݰi |%zןސ_B þ,-CfcxMQ^sw5+I 3 v\ CpH9I\FF.Ԓb;]أ*GλCG4/kvrB:1.  NPpFzfK_ɳBED]jxBǪ JR4'<:wU=j=oI3"2Gv^ar 1dx0 S[U/s!wL)I<7f`C*("VEYH& tDG.WɄ ^h4Ue?;^`Xn޿) &qͺsv!E$9~{1x47x118NJHTv9"=lXܭ6_=3f?•i~!/sDwe_75vNfVQ)uS,;djH*µ@ u' vKc5͡Ok]RtBf|ƙ3㛺 !PȊ9+e^iSDQ*ұ5*&E!\+eZ"/ w!/jT˷{!8!5!-֟a>~· !4,"TќesdrR!5 cE(dR*VQ{gT_Rz\I4zMwZX".ѣ.8+f2Jpkng4sOa*%,#y_m!iTБ]ш8jHߎ-|DegT lT9'v;)Aj:lyqzS֟ް- þ-QV2.IoBt”=rRBpҒ뜿.W5R FR <O0iw/n7_F,ƣ;p\ZķDCqPcd2Oz/ݫc_i*toâ X*ѐӴGJ{ wg{:b;RjBrAQ7(^ajQU>>VEUg2-tV!v5uuv Z"Io&$+OxlU⅙2tl!'tFCڙ[ ֯<ڛL@vC3g 3MJc&Џ"UlD+^'v\`wyw/zXaK+Rd"8KW!xVu8|PJ){m8suy:W /RqGZ͡;O;#|^JqWС8 V/Ow겦pꮫ8-k5aEwqw%8׈.)TXwI:}@w!H%N~ND4ޯBN9l!}jaՂGCL'I'A&U`*Ut/4c9TJUG%DXF g䱗q ȈR_ *y%I@v)]/r'] ([颀)__J>Fkrv@՟^N-ZpDMڞex{>8!kx_ߜ&\q62*-]{ظ Z3*$!2q&}LƙMݤ(aW2*Ld0sZ(2B,$ȲdRzxmF-^|>ނq7s-|#jPOpOUKU)4PTF$"PI% Ij- dT)(i*vպqw{Nns[ӻfr~o[{6w0=U4/><PP^_jDVwGazN~ W?k_Y2_,7}w7XǶ;v"ڷ_$-PWY,>zy励'σHʐlS,7}ϳJt8yB$r|%[>wC5G&(ߊ[}zP}Blj/קcyk9/v|Cv$&TsNjI C|8oցb5Ҹnzo>8(y"[2^3efW~,?zsoo&o<;D=OHZUu&O;dG YYߋ%7b-0]dv9TUUȈΙ( _\eKm'9cpBYŐTdL"G7vEJ @j)tmBI]"U3ZF/Yi<]a)~RLL#JR;RSkF}q7^Unwj7RZU#-4b6jcoe֞cv !Zgrw~SX?O"Bp~oh trRԼ*SJ Isϫ'e<_VveW2^Vy4HIQc;g`L0CƛQBO)~lzCifOϋS nYע|-5|> w#k >tW*?F&w7)ꏃ{\h&wcP<)G>= H爢I!ztxt*%mB|K#|T,$s ynw`njQ-ہ OI&wSZ_t%>)_]?L/GpjuJKǞ|A@s$%l |6q88N>, +9)%Eb|si'AEJ^s b"՝EJ8>:ܱt1a:F"hu"]#l r,6ŋt0N;|8/3Go%vi-%ύ@Q,!212DUɦ 3 l4sjM0E/mS74yQ5@[G:Q5hyT0hT£7S*QtCi=]4:rtDGS-)"X[u-Tv`Kn ᇣ;WA4t\װK[|-{)xt'ˬ!(80`:՟>|8U>2-> }+4S^:<h~[ZO6w?٪=YSVǹwuf4ztp#R~Ysu>8ܬoKkb iAQY&dRs/" Ƣ(aZO' ++T랎jP?`buWojq)h7ת^%Ut}XXNԆ8on{Սj먲n[ws%blbdq_h"*;mQ^1seS0dJ*թ# .R~,:rh^ 4,3 !367n#鿢җ-jxqdJm*pI@b>IdjdQdhNeflS F4BQɉ.żooCwx?:B0T!aSTK +n=8,kZGâyak1kadwÄv8}#j!L A,j1DTFRUQ4a NS~3 rpvMH~첹[\0a/@cY߷y8iӻZū a2r 80ul(Xo,MSaPMpJR`Lj3&)#Չ' LR'(ϘK&NEqʘqZacd@x2,ʈ6 O+*x?ͩ^ y4AKs4kC0PJMDGs#CiӞ$%(9-"\PN{ 4Ofw_J` F;SDH4^9Sss`.& .lS@p)Y6i ]įU I%DBj8M4`mfc)/ƴpx< O&`^&gU&1&E6%*<)Kw ӌ':A2",u/RL {M]xi"Měpr4[&z[h>ti6u6)xwErm'[l#U!%[TtBlYts"D ě/a3؇dcKxw{fK}D"IS4c Lf.yz~Puzv]TH5=E6k;\vt9uRݷ1/A-~bZKf`a bue犃Tᖏu#1 >L^.e8s/+q[jODOWGj>tظ1BcXD"Ck!ĴÎ"ÐS4IYl&;.Q:ge >#ݱc[Jmh&3fZkJFec$$((AED 5 o0˔$[ĉ+HiR?7P,!LtWũPJ %wx*qSc%iҡTL;SG + f$0eRj,Yq@ɽzy3TOiJZi{ 5Dd`Mq,Aaƥ(KSֻƑg@j $HCDLL gS!iL*B gJ ~(V L$M5`fԿQ%fU(v_ 0 s$P46A%_ULPYE,T @t7AERp ۽LP5O~0< :!#*Ae*AjO{‡ *k(ԻQ敡sAdc{g#[دzS(r Mq#+jyRWcfٵ7o]gbokȰxmD) )lC&\m$ kl}ȦncQ1l_40ty=ң/~Zq#:b XTk/ȨŇBПr9/Z+r In2R9AcbIsOHoQ3g55`뺏*WjVmvSwkĊ+nZwE=|kꗨ$>U -B83O&ӝ\VCT > b un~JgVuv]gmӵobu`k*į&kĜxkn7\QƯWMWaiS5V-lg`, OF 3Ne%Cݴt=0䉉 O5'sW'N 3Nﵠh/n)+'^1Ias~(VQxFQ/ q|~9ߛy󽿸˺ߛMO7_kҏ1A8vPиsG~X&y|ڰa@nu𛟉l(0}qřlCMM wc3z:1}yLlޭ~fcwkB޹ֲ){7^:e>wө*|ixmVOwkB޹ؔ@=,yiܔlnmí6^uyt %-F [܇evyz}1х"saFwoT0R(H ة-h3˧L}/.{XCeIzg]Rr)yނY\x={U4)YP%Gyu#P1߬r@V9&tsUvP[=X%Y?۵k5 Z\f=td5=Ƽf*{BY,[e]ʈ<#t>/1b瘻t M!4@jy6Knw5&0$gpanL)1FO0&y~Ue*^-N!#9Ñu"Ž>gmk~{&sEiCO*▥ujչ{`)ٞ :G@a.H8HYII"(}}^Fۃ\W0RR2};<5 ʻ!ٍ3r~ օCZ,`'%iq{d#x-M'<^ ZXMl1Mo\p?>f>u+I[Qݣh9]KU꫱j3KYk"1H_Yݧ$ut1%+k q# weݱVcZI4.aDT'D[IgWNWf)%廜8gB@(Cu:cFlasH2D1D=) %}J/8ݛ!%!'JfXSIȈ"*Pi]j3ʼnucS8CX )Vyv*؞ Tpu4 UFr4g͌F, p ֖ }A GYTC Y*B| `+!5|gVOC14mTK<J#B_O}"[?6.dCSRf8w1u}DxpRAk?>GtX!'? 
~l_U$nb4NQ8P!%e f3Vl_{^H!gyt<=ѵz61BRȝi"ԁOXտ:d^xCfj4JOFX N@}m/?_ly]46"ɄQŰMbR(PfXr`wKlKv֭%lǢ :ZwUv'^,{_' ֡ՠڗv3 (֘*HHDJIl#^֩ k} -oA,\.EbBBwޅv|gΠF^UKn`:MD)O9v`a4f L,CT܂q;DA XmkLMn4lPI~lO1 J9"Ue% ԃZĔp$C\Ba$2EYSǁhκKTKJ>3.#W[6Y/JS4S%B%撁Y'@Q'ṉ^x6(d J2dgX錥ȔԖJwBi3- 8 !Lɹ&ڂx*TN<T e>^}PPf),e*e^fgTVR ւei,kڒ'h;G'PH tރ&Or zfG9;Gүb\ ()6QPH E \Pʀ O+w_GO<*FFo"i*\W&7^sY |X,Ol-B==9+U&?z5W+VO_64ëz|CpS FxQ5,1eDs$eۦng;:SmNVEQc8tkk` 'kh˺;岮./X19<_X#t 3'A`~8ۼcgKb cpU]ZOիGmUGo5<]}dzU PҘkoU1^&v[]gkM9>&i_͖R 6(kKWC8J |o8\a!>i8/!,۾r?=블rhWL)NZt74W!'ƤA!*l_ZCP7UQ+ļk!U @[INzA[߂c\V'KM ͜JaQH|xMC D(pa<9S fgT!p`ڒ#ᣀI'Y@I sxy8/=8{wP*]RF?6(ʳWr@|h.6`W_ \:G[v޵5m#š잍4_{m"37ˬ:c\ {#V+[+~!_R@IMw,Xfp%ǺWچ뒗"F/Yu ]TJc-pP(-byS( 3"T%DD!bSh?MKYa:a)g"g@c L>\q| HFy] hf&0``*Ĕ EVĮ@bca* ;>A90BVPa{31T! *{ & $܅#1X>\(!Őr ?㔝E@ Z 3Xڷ ;w6\_{f6+T`td"(M hOK*mvP}pܔrAs:jG6#\;mJGU}bumZwzЧofp]ZJW)v=ݺQW`VEV7}K1c[ЭƘ@1"LCl\H@:پĭۂ1sjZnv eyyiIlNYoVWTڂ$%ڻØ՜nN%XG)7tL,,w[VZ(ƘnI-h ɧwWMu#O|ezk[U9>MtV!̝7lGLې@>7z/ҏ XG;x[ZYamݤ9Ӑٸxݓg]C̉2, !_9D}0ErGk7kۡv t|(:y#oMunu +rjqi7q/l7_@w2ڭS ݼ'@W]L!)qrN3_"s$V=O`:}Lm\ (5Sˏ?^ }m N;.($l;mۦqE*GC>) k۾3D vf"ygΓkGMsltu 9aտ̊\n,7G:kmmme4 vY*clªi2+&˜ϹgYvJ4[h 8K0@ '@{bR:|3,$ :O6p^^@>Sʧ@wqJ/R/vk&YMѮttTJ޿k%㜄2aLT!P$!0Kl)0"b(o:!w5Wjԇj(o.6ؐՆY6$M'DKN>/]]Y0,0;Fxm@vkaj6X|f槫wwoPvw.1ߦ|=WV>Im4IkYVm80-"NY|7D-)%WK)(Hd ;q\J)IHN8W 8PPr L3r!V!DPb>K;vڛٕpQg tC)C Ģ(=EH5\Pz( y c J̈TwTrD;o`iy( @)GE1-PJ(%(=EH5ƹM#J rC)ACA(% )Ր~AY:4e/NOR J@.}OS=*R%F)Bn(EOG.Nr?obRL.&@ Jd>A{RPRM1J paY]9JOS=*Rū(uk`K( ).=o"R 0V ; y{[8^~ n1ĎrLI4{|6 `nbZ7)擝brS8˃\KC-@-iY/#mJJI XK(Dt!:/iο䥂)9 PIf6CjiֱO/ƠJ%&QyE{+Uǘ 5H   P5r.Web׮D$nOD: @} サ'b=O߽氓%VE2ד^y\T dS2+OظI}U/z8a5ᢟxowˆ{ _R)H X.EG^84` v4`Gz6*b# dHB!R[2DQ B%PĘ Iݷ/ @\d.{77!zΉ{K {B16=!&!!@:ĘR(q c#qd# n"*th5G Z%ks<i~b y฽ ~סGt f%d 4dI&1,`^SDs?o0ڝ3|YWa9f!!:4[ tuRK) 0[r]F9pBfxY{}iәkP"oeuje5@%$jQ9OH>;7D¨1R1E'1"%"FX$-$D I#0lύFnk'H"j -no]J8n@jVp1?§w8~10?]vy}_ޫDt \I$框A)"FTAD9bJ*NW;wn_7Av_ح+;MjbpWha7ˏqX{^@s3RPOEq˿H+EurD"5J+5_Pa,:7ЉX(eBiC ND(J3hX\›0zX=.rG.!Ec]E wk / ZX}"yf3]Clk95<(̨U2#j&T( #LDG,$0, kt4"**Bc=;"v8D! I "[?q1SDFHEkT@ "f-X.D-˽iOoT mt2?MUxJ7WC@Q~FﯸIisYì8UV3ccϯf#J"oҖ;~ftë`!N *Qr}F#^džrDƲxy18D쿏$ 9kxU&iQJGIK3#;nb`u0X{q(ĉ*!E!m~dvVFb3:B0[PdވRpBEdmmzxV{1,o,h`Xf {F6@P!( !An@1pR9Bl0F K+p=R9Ŭ`ml!JF#g=mXKfv9U_ޒ]búKb-3p M4HoQ ARSʮh4)ZPVեG Ő#`fFJ hpp8\HpSon)*0lESv/3}&Ry0e傔Oa91CbS$Hf`ßc5x8&|NyZlw8ij]+:nT_08v~ava}~dW5ֿɓyCZRlZZu(;tR^1VӫwVInDjucky6䥦Qn'tc_"XCeot0%PvԩȺpKgwN4|aieNY&|#ߧA:cn^VMj7EGVBrzaJ[-_uCQFuyִXGVBrb8&p(hHہTc/c B@ྒྷ>d2gV齞긧Y0>>Ճ nLݛWWןz2WC_[Sz5vqiĦYSakIiıaCʤs2r`tZ{i/8PLmSy%.g\8,ru&%1BfoFgfqQ%kM"y֯P̡l-㾃7c<[|r*=_m _H:=Zr)i1P{!=ݺQSr "*z<ۓbCe' U rL_ m=i(C(=iU1Lв^[%yHK8ڙ RoAu ۔]YjnތvWPAJ6m^&Y d$17T G%Ա64sq hzT՗94Rzbj S BzdQUSLaaTkÈBi8s{A'H07kPBDr(BceX`GӤ՘ڟ>[Sޗ :j}*D" YdEVW4X@d=HSC 4ϝq47; jFY;]^s$=H.y(H8M{s)bq)rhe֢S2wg㏌gu|hwKν|<#Us){zp3rW]H5őJITRxݩz2=+ -qV2e!š=; 1KW\Hw%m9A$$룂][U;S `xl²%y$Q[&!RC?ԍr _o ?6Q+abDp˜88yG(i!IO8@aˆ'3ᵷ;ono/4bY\.! kT!8|'៷'2RzVLeR)`[E\ks&:n&QHs6r! r/{1>S|s^߾,ikGS NT%6G(rO>b!!lF%4+ G; JNzjZauYaf1w.h"oQ(էp@$VkǶ*zca!4Ub7T0%J:31 gڹf޳Z1ˢ_1ȎKLO2RK&qrz!od[ g RXVnZ`KVK5zq MНr>'h#:SهXx%&NGQ͞7UuЙ??KV* 41Z7ػƑ$W,g(}CY {P5/ G֔lit$-Q%%E&I٬ [_ddDFdC|T"MM0~9?QlSϋq0Q g~Ly–$g!'磇o.&٨O'iXޞ9dL{gW ATT!^ QWdr|ͥټ:IkmmSZ!c{( .MY6Ni`A:FL <;~;94w<> x`R`Ss>z@qoLN̷sg>+H̽mT޷9WwVA7[<_kezNkl?B$)$xX%:wm߶äԈ2-&PXXK75soB1&soB1PL 5M@~e6ՓfgɤQ=-a"xUM;k(r(T ? kv;sꅰW AH1v a5t.c~xp[ΠfV?9$Z!ay < X,)3kY-̊k#Cf-fÜp5{j5"t1mG $zdfVޮ 4 nLEt}!!{Xvf0Έ: @>xi GaeW]~<Ҥmr^(=1ӎ)EAT_moֲZ>[uE/ }1!8GB ""A? 
۵Sr  c Xf"ʗ4b/ӣNH @T''S,QMٽ9{7쿾s 96ʮDtݪa}{ Ͽ8pYn_d P#>U?Ofb^^/0bAq5hwO4籄ds/Xc2q X-ksy]\m>XJej8f\z~2+~3Y<==' +Д%r"w!l)x$ʗjq.×NJ!3}PNY)ǨۥKEK)vRMA$a7t)vRPM佔^b'$==y7t1v~r;)RI'wTcγ,\Cd!uo!⟠nj@+A/(I)JeԅwTc{s;)ɊA7ti %Zhko %qF*̅RB(0<&1,DDR` #_DC@@$IŜmCh7\7TNU[E vW 7t/w.2ܲͽؕad&1e)燏 I@@$rrT U$wuic=[H"˅n#ٿ]Ĭ7VIKNw8."'2h;$4My_kj?^EB~6F֌CR'yYD5u!-q@8+"J ss`9>InhgQ4% 0[K!ݹ4w8uc y&h+yb!-Ŵg*/(~Ago LI@= =C{ѥڌuZ!32BntbC%tמA,7W4 $!e'kJ϶f"֎y`Zwުq>G!{WN9a=F)cw5u;j)sxMS. KMƆrIV%wӍxkI.!K. Z_el%sp#WG3vMFv&d>9XlRw;iGWepH H7`^=Uh}R+ʿϭB@'H%k5[?CZX@>]yȌPsIR|isDɜrnrUiL0u1ŭM և_>+-Vr cCĉEVGLIn\0)`qs_q,-#:R厙ֈ%4VQjSG`ϧOi`|o3дKMge ܋sN WʶybTm|.()1^ ';ǧJ;^Ij8Ā о$6C5LU]7Ɍ XwN1? QzMi@cbX)Ğ"b l3KykaFk?fpgVp;C7+P(~{CZK"nUA;3A3?ɻAÿXhB;L` i@#n{(PYiN{6m*#X{7i0%-gq +c佃 Cd;SF1fu7aQ#kOޛ%AX b:'c4 go8il N==1uw`ǰGgsbrD%V=jl{ۇalئ⽌y b8$ Xr UTr 84{a%SAɅ|LcF |BL1w D`CK eBIEaH Zw:b,65rYg'aP3Ȳne}OHjyInZ2*&ES%"OM<< V}v_qESNG!)}ݍ$-|s2]!Bf{m ?~֚2dsdJUgm|,f~N2_[jqcz٬j0]-x3/6r9QA4'e8l_xֆ-&e~0:Mc/F?n*0Y!m؛ /3L5fʀ1*|rF'XMiK#̰ hX`=Ul0׍fUzX-Eۇ?+:zd@E}p+:gI6rGŖ+^n.s?{UP`7n.cі෴IDk(E\R;w+Gu-ƹNGj*1ZY@V=zSVG Z]|Qj&- 0g4%|ԺKFzuQKrj/:WKS]{ٓ76Ijb> 6ذq)ct lGmrMetTFʨqv;(|u/dqmʔ-/:P&\ weO%(7({?!4hs\ S!1g"\H^EI9 P(^yt'ZݸPxH&dX%i-̕ Z1 |G>(oP dp f8YIpG-Ub{Hr= @Ɂ==Q(oiZqv+Z coĄa\(% 3 |"LD+% &HBH89_&gnt.&[ 诃oX0Fw`*lMnjɁ\6\ҁ~x2Z pp6x|_k-p($ D=ޖgS_uTC8wFca( Cv4)\2pJ1 4sCELyP!Y _3iA5|uhp})|3I "(ʠh 1eɝ-Y9溑21v`̿lblP͗XKZ^ 6+y )]IJyѣEbP[X$k!NA-ܨzD'_ +"ɐ} sHziށw&.dc.[=B/ ;bdLuPiEP{O# 2jrE]X%,d$UMK (jRSOѰ~^W% <8\bCY &Dž u&Q.ۏA oIґْXlBƑ 8 c4EȕLj$>4d~p(Q@s; x+ޝ iT+A.A P8R$111硈9J6@HSɧL 3e^^:FJs=_vp؟oT&ۤ$1nLnI&AΙom_ԧ"Ʀb"eP!"pG\(b0 C[ZQ .toֽeX o@-F+b0BR l)lY0d Z9! HFRaQ"dDx6rg-r[em` FrC>0ۢsɢkBSU7B$vEm C#<1#\K+j#"8ϓH.$B ZC?!qӀ&~yXSx8^ RҊ\֥ӫb2j)|`kJhG.ʵs](PZ*PPM5*D`R51 *9i+JQ<(xQڷ7ú%y(yg*C_j-`g1 .ۥ\6Cd'|4vfo[CJ*j>gԓ t3oIH-Bw8]MX1mSd'T8AAwl*]GSH>&bٙt5JHntr(r{LŕG@d@9( ZMEL\"]WW=Y.ޢvѩ2ڭEKrKjwݪYj:$䝋hj-=&uw[nUy`#:UQFEH&Ecڭ KVsݑ)a)Sb!h/>f_$zq0Ql0QvMJFş_MU+`=3MWP TViKo,쯽a qG5:+cM}7u*xjx\[&VC(̒ML%Prme[' BRQ_ ;+}d@8E*031>BS1[#7ix8J-R`tx5,S/^Tmsы:,iq&o6LՈQfr$|4l?xSW rE-T;:+m6fEg.7p(1S B@<ҋ4)!Q|kn!sY6o-DBI%mަc)<>IQK'+[RKn( 2E"JCfG&9yAl:/Ԕ)5%n?~>'"C8Mt [v*m{+pZ@5|Q>B>[DBB5'WhIt'\ /~1  *Ps8oi"xA &. "ktV(xP獷`{+uj؀Nks;_1f5aS8,/Bu"8{~_bVHo;kqc&]8buyh1Cl)Բ?Yƕ_Y6% zJm$i>ƧKU-I6ۮ +%P&ҥؽnbgZ:]83$PL ؂ZG lϥ+̬4-i^6M(3mUZTv<_ WdW(^l_/Phbe+|nV$X QfY,>z 1A8CB<6&{aR>:˯ ȶ7N6ʅ@g9Y#ي5I2'3y Dx_fU @IRLZh$bFPJr0kS(5ƏL| 9].T0I1'I}E{6ӚCfص2UF2A-(i1fbJB&W+\~''qvŽgjz^dYQKq[~:"Κ16xugVj_.g]?Ud!s]ޜgTngϳ4چu8#ː N-|Ivm%/\/Vx5~>̗>zVx#c} g]0!]3$xp5Bz4t[A!6İA׉&4hMW#yb6gЌm:w; MVdć[D3fO0 p(ԅ砣Cj>}b\wGWؾi_|zqo#x|m31鎌M~yѽ߽c#{A:}^_/2amPxRpYHC~\V8qҌK钹x3 Wy`|p"|v^7J̬d)]71moZ "'Alo|?<L wR$]耍(ݺ ޽bx4yasب7i!); kg?8 sk֖5<5EPjuÏKh _eiK V44^jù‰$foj;o*4c4]L {bnM/ͥN/_/^>@?{kt_[3H@3غ3c p; lgZիA=t~N~&5v]nu[zy0 Ykը $gӿyO>ZPlb #Lů`!)}H_>FIaGIOd8p98~i(r4 w/.u]g/^اA:(~0Z|f ׂ?#(c?e#_nIsb7$fO8o | \rX-Mgֈ!{>&jer\.Ds0{b) Gɰ`_ jm}_(1,D|ŽfU%'p@GS-Tڳe(,z*}G8hsH?vCxj.W{tN& qM]h"pN0+uy} Dk)Eln;D!l ui;2W}e1Ccx,D#I P@`ub$IdcXuJ1Uߙ)ƚzr+@J1j{~^!xw;DZs]TgUr[M4T !bƄLxED4摔:)#PӖ+TLzbB} ꅸbtheT~@MC[8RqɈ@!R:I,cH"' m@ 7PȂ*8T¾[GL܊p%AhIT'4&Rm(!" XqLeATJ}øՕ2nAҢg*A}$V[$i ὅ"36ma'@ I䭍Gc" ]-ͬs޺O2kr~h ܐifEq߽SI-[Ew!s`.:zqU-_^̕ r\! ;lտ@.6ךs;һ"w*TPKy2jK@0Bإ؋D3' v"TbpأFQfR8TrXEMʛz 7=/u=3,6 Le 4S N)1=J[RtcWz)ߟ\WS*|p+NBΧb-ְW`Yh F׃?= kD`dlFٸo&HÞ~_߷?÷z~ݚ7/Y/?u߂ZVe}S [DVj(J웢;kɸ[~ٲҌs,\T:왶~}fX]edmF*k A/l7ɢoArMOv\2±I*cYrXUT382a`O(mMYʐ3CN=yO9hZ՝CZͭ.KpSOFפ(oQ(e}UE dE8Ŝ8 %AE Բ)6>-> Ö$WKNX@_:T.y8E#BcB 2taux6~%t# 8\]ޟnK,$뚖.9?8blyzFS1/UYXC}^5GXe$jV/ˆyBpOw&:TL 7>8ǝR >ΫDE4 $[vaOf^KaAltw璇I{giG-'F(nkйg|&yEPSHV){30?_™i)IYy! 
Tmo"a{"zmS۫:|MٌKQKIEz;{dN`zRpU s9kh)$뵛*K:zWv_^fRUw97 tV?ttDӷtm+@SG`q%H$i ~̠{ Uڦ8͛H3ڈpZJ!{A5>5j `4ϳvr r 5kܵ|zXUfDjNOC~*, jE sh!Any=`s%̷G+r+}l:^Tcڱ`V @kE?MqbuzǫS1Y-T{qZS][δ ̄5qߜE+'tʑI&~A U$B9z1{ˊ>[Sn\M_(H'l1̴ %A?[skNzle Mbc9-K;n12: X@ TEX"HJJނ JX%b=l; 9/Ӳ˴ޖ)u3×gRHE35jތ>a+ 8"t{FD\ ԬO-$3Q=Z<ٮ=;Tc*1#Q)shPDr ':n>Bb*?J0z=kiO%I+L*3B@ȟ7 !|%~ɟJBbpߣtw|fV3 ,7|;= ~.jqu+JOjW(`K[FEOm[7*xG-ń*1JhThq82'p}[G3b͒8RЈO镑Y`?i-|ԶkTt37PSK4ǥY.enfjJșK6RǥT[.ZRǥjʸ>sRN6Ftggt7P3!β)s Rz \e)̥OK RNO`n=&ǥԊ3>i.Kf۹'qr\i?.eIRAM9:OKR.T E$ aϮ~wPbf`oŪ=N^ M8,-Y43]/O}Qd7sgUy͈LoOhPkѽk`lTT{S<2]߲}\Ksflz ڛ ġZ#pAZ{ݹ.FߐR~1nv iX˷;a=`kOW͟jLCر4Zk!j\]n;4C;VeNtMf.+O~9< ՎR6twٹf;jjTSmKfmhOpZ5Ts[?w~]HW.dJ?ൖnh&:hu<}jYڭ|"Fpg멿vS☇n-i$:-hhEM{nΥj$+y&S@ %uZE7.8x=~,NA 4Lz'__\~G7tRHr_N^DDzң")igk' lQH)iRZB͘d1 oL ]5qn+j=6\Zk7Ҝ [I׽R/s]jonW;'o3L{).;WŻ؆Tw)QsVwv˫&. /W7µc&I4Y4[g!ő]<0k4w-A=AMdkhl?87g)0[08ôA=aWէWpњ8G+Ѷ6dUbejk텦^Aac+0>I kM@-qRLEJQ'HS_ דvwesO3`iq)q Qr 7\aiu(CG @B` iPBEJ\U_dNȗV2EBdk )bbHl8l6f2Sa/H7K`. uJ$*ߥRNaWPsISHz?[2%[S抇}a#f1b1ɳ-yc_<>]&W=}D65 *egDˆ?O@μ>qsw/ªv懟߻p؏7Ba%@j`͒-VLe- %UTFOԊOltxUbϛ6\nYK:W\yaJaSwĴSjfw@I3S M4ǧJ2ߗΪ\CCk vهwzPP?vc-^n ࣠Dmss\ōnhnWT=3]hWŦRzIfBPVCK2NyPde.a7G`=l1l˧5ɇf4izCtZ&j6xTw=x'E`,IⳜEƽ Ud%s S5k]KyR_b_& cRSfIZ'F9,dOax4'wGS^hGB !†9d` ɍGЅ*;OM!b]TA-P_-BQg ;!QJa&'smnQޢU>lw)i9jm} <ڛވЗ֡9c9>DJQAH=-hyN 1؆${uUL:*JVí*T-F4n-5~ ] "2"N1-:#>X/.,F[g(ɻrRHVZP$ʞR"V{ 02늧_ŧ[>3k"";W [8&9Y}VpAtAy0\2S R)%R9- ҭ$~Y4PZn}pͰHkF6>xƲ1gRq{h <N&/Wkwy[7I)-|ፑ#d|=tnh&t( ǜ# 袭K1eZ,#7H17XA9}OR8Nkk`Xh=m!vfO-G,˥ހH~ZKD] ( d46\Ң^BuO<#Q2ť!AQ "rV6{OiE6'Z-ͧ89oӚFIUr`!Yܽ$c$jk"Ԝs>cxןÐqH&RYJOZJ1dCcnjj.jݢRzzR yWn#JZ ]`w$p)fpElnpS RB!)Dt2@щ`}XBFy+6qCk\Ɣn&m㙤ϲk% Vؼ06?7AX!uT9uD+$nY7zώ=hY dd],ouK~3XC5e!x_ +Y?[zyQԛzyh'-t<ІGG.vO\v~-Ӧ,i)KuMYU7u^yFq[畆I+{&=T2.ZQ]=w] &a~JŒ@)w\l(+!PEY`oGc+!!CaKSZO#sDC*kFH+ׯ|8,agweᠨ5?k;/x8^kV|IUjc,gHi}fb)ism`t崰v5"eiaĀcMOǺLS Gq ۸!v/h&4@*kIf.(P ~]_Ķ/{4B69RUeM[x+2(2.6XϿfOt$.1:2:2:2 ft) *i,͕-#*i/|QxUXZnJdz zk}i\"^7vx] "OALFS@6CՂXcR9L-Zf;֋/Gb(T|Ɗr)t])*CDӛ[G|ǏQef6WT/.?ۛre$t틝вM;Ew- )y2uN`LZ80UF{('BLZ7@1OȻRe!$m y߭'o)Z"N$2R۹Mǂf~jrvj~WkCPho !c/bN;¤DGA!e<9[*ěQ C겆!fڞdF'PRHIA\=mL~RWš- ."4Mayn-6P+Gi4B##{ȅ~gru gdke\n_o qPTx]G64~:$D( Q+]Q υuGɳ\<3&F%~OXԔY)7Դf$}hvLǺh){ʅ8oZE. _>Y&=B.dZ:? wC3&_G&_G&_G&_י\υ0r@1u@ 7" AMYSLhU^xͬ)h'B]U.Dh=M:@]ph̅ዹ yNy\"!:^oNk} 헙ׄ@Ҏq$MZ$kiRڱ4P,KlYZ#.^+ePt#E@_z &PZ߾zЇdUgf {;NQGlԞ8?e<_8,ߌ w>ʔ|t_&?3Lct09bDUd /G;^H%tt1=|0KM<5J},VھwAɵX]a۝Y5di2Tufu+{=ۗ5-)<-61xe UiS4&beUq?Yp]z&. U*/O!~!*E{W3գs%;l 8gCpG+OwCA#=@TbԠMAeNl!B0دuz,o R61[\?l@d/U AY+V*}T^kk x#466X4Q ܭc+U(&HO>X:Kg0P5N.7V͖JawX}oO׃Ymfnٺ&THtL*Ŋ=sm,Ak8EUZU*])I"߷pb@%bK!'Ok? ij*)F\3ȢBc郇k fZӷVqT'!ځ9sd O˧:Θf,#î"Ys]D-T^'S#~_my N G@.eAd}z LPb2!F^ZOEkPXҿ-=cHoа0*.dB.a|m*O1VbG0TKZN9>s_ew6[C TLRF,0Oύđ(BjJ}ʻuZn2aXh͂®[3j  (}B-=CThdD_$9#S>%H1HZCy2Jp*۬oǜLޚ[I_Xm[o;x ZoBŐB"~J]pTZmF )_kgoEbSCXHg'L2;1#c.wˮrL7w32\i$_"Irm`ME=cslIʡ#֣3-˫e#Ԓ\FM1T +ݫ]Զ YX %6Ƴ{?,#;RFBIT9\e*5/P,. )bY%, Vݹd}t쮂KVԆ-[ŷHNb1<ks%R-ke{Py|n|zp&gteBZ#$ )ڭؖ~fK)US#QQ:u;66L?qjiDoއ3v<8$N']a9mZ#Vmif9P!. 8:;TkOU>A ,%#35(ysl*2]^}QG/.H(lu (H.m{ezNa1VJZR>E.ڄA7~{9w濻gb.)E7^%%, A呐˰[#OHq4)֫oX60WΑ%fM[>r0G ӈ֜~~*>+ REV~V 9ᛊ=Ov* NBוbØ Uq|-5v*@Qco; ݽY 'Md -8l+l!ƓlA"8h#Sdi3?_Lײ_9rJ)y@̎GUZQZ̙s 5Y+[Rw` P1$)LjFSqA,θ6T&h&߁I(b;dV"SY'ړbJYo~l{ѱno#΁ξ tZo]=lm QT】a_W hҮWM)n:-ՅPBiWC<93yoUNQ\p*:EG7&⽋<&*Wv*@Ⱥ R18]N6q@l-r[!ja "IJBhx{{>RRob* ѽz؎sw< FlAtDIp <ZMދmOi)E!3.r)0+LlHw±a\#JHyvRv-\+hLJLDoKՍ[6l}F@T]A>UB *i8`J7Q#hǁb8GEKyd1Da0u0^jy.)LAgtw1η7w//^N Rv[a# L$QW2be)[x-7)O7 ! 
{EV%}X[gdUE߻ Ӥx*0퍒xC :AS_06M9fw}=c Yc n(5쨎K=87%_qɭn Pϴw蒋V{*V`vc7F..j +SbH k1=O ez X/o[f}*ʔJ'ߪIM mVh*-fC?2!6fsrW0<= ;5u--@ Z咶tuȳ\8n[D4DZ&H@u,Fö)]*BJyQJ1v߼k]m K7`.Z#$םtQ-30Q^0D`1Nx<+p9lm3@^փQ꧊<:dvM c=ٽWEh<,z`כ4ފs/J_3f_`۲(meoC R'p*H>ۖTxdr{Kf{o- `mmmk'/\u-Vաjg=Sf_G=ILR]uSnveZ.3sD; V?84&`T68G:b89:y!h-5[vCF*}]Б(QEbMS,)g|۫AA)-=+;ʒhn]8kM?1X W~1]  =`:`E+0G"R!m0N#+\iˍ!;L&l!D)XjAȌ{,bJj<\6ecaEWK7P,JKۯ^FvXz Xԧ3/3?}me&`|Hc|]^} RV3R%}Y0žYEH|ƌ }hx< QxB ?|ć&FS4LN!dlKzA8;#T~Oa|QJ 3_: HLLėmg5^ç󍰠~A\4di ziP;o%֦ rJЖhF;^хK ̞ JC6D0{EG56Hڒ@"e:s~i/GŗR} t&f<8>2+N:;H sd}J$3G QfL) eT 5W;%qP ͽS'jHFoY.܆n!6IFQ0,!PD+s@w_&8ufWFs-Cšc\Gx/[[C`/(uZ|E9Kl>a s$8g丵IŹ۳{3{Hix6a@Pf!…9 @(>g QZ7~/* <0ՙ۞,np(|AtyKLˇGA(אpFugn]ǰ,l^1!0)@NK,'=:7;ޖ%&e{FL%`㼐B̛|98$BԶ&;䏪3ٯҟ?)$]м^FB2)b-${8H9]Jؖ $oWk-urC5"I$IHJ_^ݘџˢxuzdm]z*-Gq׻*t ;D[ Fӣx1[~AavUoLF"GJ!OI3m]Yjbl?V_N+4<pJ)*+>.뱼xLA4m* z eDL_<+HdDN'nϚksӑ)EkW3-U}mIȇgqNZ>$9I $%f g -ԓ't̪俏Z`nG C5ҢI+ > <y愼"Mu7-F޿0Y%-rBNIdѲHdXcpu7HSd8 Ș0uЎ{hj nAi+kx z>m|8/j,ӯ!<?O4׍ Р]G[mP <OI.5$)uy0+mX0X*CY !S^"QFKʫ$Y zqd"YQqAWDb&fhآՂ|ͷoZEBdi [)dF8$e̱cc.(fAqm`oWafñ4SjGPDx{a͟+_tdaR?c|`i\Eҫy,E]NSPA8x|2_,7zyG]~Twq\5GvޤӞΌFQ=5XM&VlW{O.=äQ>{EwW [9?ƕr;d`%odH\y 0gZ'u%yJ7g<ˡE0*YkE^QOh[?v4* nbmF׿"#]tl,| o"ys6_3+=_W(  ws~>B㣣ƾy-Bzt]t=De;h œ|As^-ॠz]ak Rm ܧz7x;U}7H܊z2Bygо]y>vKn!<3j{K HZogѯ%d+:5a%~wՃޛl4Y{~xO6ìߑjnn *j R{3~Pw;v3RS!K7..29x\M' ~/GYHC7`Auc5` PA7(|9ku!dӿ h1Q7X^mnѤBL Z7|z g@.IZ-nQviBn41-o \USDOwvBO5[sU21ȇ/Xi# <m[ropg˛1hb!Td]hAX;a>Η, 2-

%kJ^L><||or$ݮla$+b(Ø4lL. Лj1G4+ˑR'/7@|dHL4R&OHQ$kE1={ݻ{ŧoVs">0 >YFQ i0;?m*%fV|/xBp! &a$ IaRR m^߻S#ɥF1g%4bn$C120 : 6Xd^EN+HmE_#hN8- 98`d4 EI[KWء' &UcbHaL+3$G.cRrB##.ED[yKM0+pj,ÁS-8a$*VC廤KX"FEO"f0 "PS@RX8dĊ)*XM``y(= 9<%T?ɪDaDMM#T.g\50^\iSc>T# [kW!wLv=3U۵r_yS7_ FjD? aXY0 ߐUz`au(9xەz3@!ۆ̚~Cyw7ZbMQc/5dM;;bM?x fY##Z`P;x пAvU= y=Qd;JP^>=§ZH;jV gXG- ՛MK"e(89sYn.G8ƕs_ސ%Z%J ;Y@(ZQx:bVԶt8O?жdG7\8"H4 Fv9KTܸ3}THBJ"Jh-+c-1r>r3.vK 6J2H@J}V%lz%_MP%{CE Xf?i27` N =ӗA K.9p48dpͼ+JyIDÝ헸goj8!l g)8tשub%֦۵Vt;4\iqY$5D-=Bk..P"$hoZ *(2NP1v}.>!(e$)\^J:-3> HZ1K.5\ O.!8`icX,k:/},^fTx T7PY\UOIԢ.ht)xQ?>3AV$X ŅHZxPP+F%o$#`dѨz|uYM%L>J"W扸s[oG%56+ <_]hU7AN|T8vI;,%^[_T\n`&Mǝ73Y (;w |ZCa O\*5S2ktDh Eׅ-ӌ972IjKv<;\"I_Mw1Rts>Zy!~fmeGW]~B`D)JD*xN&A&h~ >cpWd҄TkyFob+Q9]NDeBfX5o}BA\L*z'2F|8a'^fEfl+QLͿV$( pK|DYSɉ@pQ%a-2YI>Hg`g\DzBLpSY+\%-“ |-Kp!}@ [a]'MTzٛ~6 F q@ł D 1E-) [Ϙ?@L9 #8)W'໫ G( DD =NO..'6ғFׯW#P˓Fѵ9͠;&֝`6XIt2O2>Jt H4^/oa>b5;/^Z櫑VP+e]i;traq5z y!I(!Z'G(MedcH8aKQ "I J"'10G^ĔڝWEB#.#]DsWmڟݞgj+mKYC@woF6Dю.G\'vb{ Q7~xEjsu׼P%D?KQ~1me +qsz1X²jVvܐZݩneaʦ ZSnV ZZq"u!PI%ΡHXr,_ KBmq3"tG$j!F&(%}cԮ ˑJ]+c҈3] Hǔ1P H$q]5+T' lSBf֯l7%WKL&4ԌO*  hH<:+e*p,H=H/Y40o4rުNBqPKf{mi$`MUۆZZw~lQ_>u`.|F}:jIK[K½f9̔ru=n-LK|sZMNG-B-VcLR(QsV+qңR]%cɂ-J_N+,J?>Rw谠db &1,[϶[aks(>יTW?sx|:L=5R: T,QP-rEbhńocb"<0q0, #?+)ZfsѲR7)M=k(U+{iSb _J"J?}Jkm[ V ܧYr2K(ȫ^ L {C];+e#=8J^-ʚ|7h793VVK`4V>r ie &ZKՆgcNs'w^ wJ.oh|xsؖg/"ц)ZSK)Z^KM&ZSnDm˧[nN&l>@qWB<(QjqcwaK:aϘѻ4WD"0LHE|Ezx0aZݾ6d2H"*rJ#Kq\'L؜&jK([.ɌhCQbxFHz-R!F $k#`)9I򌺄4\8C@5ҭ`T߯HӯbED(2 UNJ7+;\|5d8z5q09)ˤ^קZP,R00FCsY\i7HH&ʿ_2{M%@;jZHB0]OŔ3f U֛1nhWPg3`KRrv~@06T?'1i%N=83Uv#, FJ 58Ow}~ʏ]|mo5 [EgHc6SUp剱ʽϏWK֋~%cn<5&..m/6Tٻ]n>Ͼ|[gp&sm" Cdw4*jG4bW{n;Yh-zd(7Nn$͊.^eDy%RrKj OAC}Ԍ`32) Zfxh!RuGתRt^|Jk}LMMʹf?^>7P{1b A@oGW*a(QPkg) ^Kδq#F޿4?Oy ^٥\.zc=AQ.>OC۸Z1*~*ĵV,1+ۊZsEYohǓ^7S,ʨpz?CWx0Wl!X@g n*Yi$ ZhVm$O!&/wʠbo=Dm fDn Ki~S(}Oru|zỤ,R٧(]TJU ][NCeI !pO,úVY(](dp`Fp9\ÓMe.!;ZW=x`脀vl{}cEdGO]߽Kdy ͏8ha%}MX[y/[/⸡E^R>#GBa%Vtl9HHe +(y Y77 Gม6Bj?22͹x-wB-$} C6,^PqÚgc5% Q@sD0Hcv!u*T}w·t ޅtK×?ڊ]<|W4Mɠ)IChPRF5lyǏyo%ҽIu(z8}~03$(b$L[vs=v_n\$ֶUO$BUphT32@[_o% o]LٵS^n+^ͅc~w3]I]-r Y,^L^8p_|V2.ʫј=_%z4F^lk5? >U\nM-$ (R)r~R7PsjCR^8:0?-)\u]s-̔I)3YJىHGP߬C-襱ϙK)geR"woUΫ=-K:@RM^Uѯt#.i)N섅`cn`EWTVtY8ـ͔ j%l1HrѺ}{6/kwmzQ(jXە=Uo%]ZvCV%NN.\v7NZz891QE!qĈ1v9!`I]r,$cVNa*"s@j54FG+O^HuTmàoZsTaZ/Yl S x%T P!Q'AGP9[[nOb*eυ>^a~3{N_˭"~lyy2HK)i"i ¿HnnHƏZK<+/ FP7=*s>Tq@ݐM&j2DʞPtQS&+x,3JeQQk$ρr.) I(GL+h/)"jƏQĤIuj&TXY!RR]H`Ki(E)jJPaW#A=z(nAZPm,Sx\~k ,۽y&͊Rec*zxk'(6t굓\wEu@ ŇK^^O.+ZQ(z`⮩rͩ2& SV]1m8NiWb5ȉVr,`HivU@-ZRyrgdd/ZDSGEQÿbcT/ Ji#fDeξ~ޔ/M 토 i*ZȖwr8KVo(]AjA?%΁ D邲tA G@9IP Oc~K+?p>r~jַ)_qf83yl|[#V-d(%K)jKUIYs!qnd^#k4$e": L G::LUBVmWUI5%]C?~M!!P5sjs4n4wj#eV35,XH>aY!h6^}SL2=`(zxn:\1<r k7;w v#;"Ӛ._Qgr<**BO߽3FmAi{Jjtn]NW|x}#h_:i #'N@$Bnኼ[5,n@L(*I_{JZ-s_\Ģ-8{FY: x7ӞOkfLCa8ar`sSqۗi7ڞT/RGbp"\ErCc9cGr#[Om X`/d[%9999nKd\Q,PlreJ@6UQ&znuSJ׾k! EYܲ^uKΚw\r'4σwB DbTJVHb|V&|VX+ ݼZmZDպw M8l\v*I#9? DJis(|#TTL QYA$bnRbPP^m! 
õNfKˎ<jl2E|oBF{XIL/e N"5pe\k:—*#ɡ^E9CgFzcbMJ<xs.!syբNcF}duy<=Z 4'%@ھ#s뎎yT&jPa 8b5$)@\)Uߖ@ >}(c@ax4Q/sSG ޓ$&<%^۠W eى> 7V1ji^wٷ8%hM"NkN8hE CyW[7Τ`?~g{" M.R ,lj4Ɍ3I*θ?}U5[kNx0F ɝND)d]858弊D1)2NM;[".UB /"4?8{jP{0Oo^aV!<K NIf$HFm |GXr"L$4>cVJ+K gDstX\e Cɼ04~&<"' .HA+J@_={̃R/H,k!c@3sFs^yA EyO^/¥:¥# = + :h=n{mp-(ڠ`\DF8qIk1^j][s+,=m,H/ڭԞrlNp"sHYR%f(N|F_F5;?+ZR$w3; ~  ,c/PEf1z}?2b>< dRa{XHs< 3qrNWd +B3I20;f2B P{E37ʒ=&dW8)`]Y9o3&1ZP8)4B Z_y0^-=qIGPAZoEUhRᶟBvs~gcU÷\ fW8(]B =_~ 8~Yz:ljjH楢V2w6juZ!qPSl|}!- x|OR;n0lCNקq^گtyn|}8`k@d @LK n nY$ ܅:Z+'L  l[]{`,X OnRPfO "OV2uGL;6M~t K9ޘk E&pGL3'>BsL'|T;d DIp Wl{yݵ۷Qk+wӂj3'0I=%Pu.#Ιis*e@zs3;5a#1j#ΝB~\ θRk(nψkQH4yVs"X;':IVoh @Y/'QQ4p]~.[p)g]䡿exh@a;jn.ʘw4A+ԈB꘳wy@c`WLh<91tg4}uEEIwAnj0=7Kd5{=btxa1fNL9X12ELyT)F.sUw۸+01_iM>c!3+`HO փ+oQ,yha)WnSv/` ā,p"H K91-fYgdF--r,blPI0'5Ɔ& #Rt| ڄ.]/) V5b8PA5ҦδQ̺ztK#+`؋h, x*\f2Px{J~iRU|~N.՞P鬻Gk/Z$@Uˌuh|6"'^ &Eߣ Q60\QXc徒1rFBx&YI`NK-g оw"Bx8@zMc\B oSߖ[AQILH,qk)h_87c߃w'S&SE5F):*HTF>FuFo Or:Zfuh5 fH'j9j=hw^ӼҞMXI{JŵlǴSp;KP9vRJ~28q*O6Ȝt=)RӾs7Wew>iNV?rCJ3 8\ZbPIW4 Nw hJ!\L `Fkw*O3~hrsq+&"\ ';A,IcY|Ԫ㮔NK*W+ʸ9m+:KOqj}t$Qa<4tr=?0ŏ;,aww7'v̘b41HXURE%==T9%8}ˮk?`mUj]ͯlU)D/'yhEdKR^:BRNi9C-(ڳ<˳23%m‹NaxWJgƍy~k %4w| Ԩ?q7#aeø)Ln-.U0mΕ$ )]O50/:|فP\90zajRzGm($.a]vP&u읫KK^3yf~x5uܾAc 56 dHa?24jy9\N <,6o PGj0^@ժE9yd1?o2诋z[Sc|Y].T#,hbT/A/ɻuƉ}l g^&|D㛚5zD߽yoH.wjxMWڟSiɲ?ɬwMf\$W=x&;mY(6Fkß_Y5՜'%~Xr^帙)Rе nwz8q7@|M=I|l9IO/-ݬ c/zH.Dиpb,HdϽ8>@_LW!RDX|ݍC$Nz-_7/#K'@XֻcZHR AgE‹́;|1)@9hoy;ZVNSkCrStj݃*A>;_QKxS6Dp#~qd02Mҫi"%cu5|\q*2|" _DaYR 8Ø.]P1h\qY7x&Z43NUѝ?Uwo\AY.fYk}LaD[hzb_z`VhbෳWw{3;U+<-rUz:Ԫ~w#'_7 _ zU6ݚ::|xW;S9C,څv+:ts!ݺҠ t}GaS_LuT!rSƞ9t$l4h:]F}> T)bҭ3Цҭyh?oXRby`lw?_1؆ULǃov:6!q]ewe n$6a}cj.VYZDDS>}1 ~뀿O}(rUmml\T\ɮ_ren ;,3]nL \ f agA.ЦE% ,){nʷJMV_OsN6R25+\if7_N6"p=$|ug%䳟gK t㥠5-lFA5sqe==(Mӥ k„#yMVJ)]iMv~a,;T'3pU5ev] /׼5e^ጶa,pW r%kc" 2[~(ā0JVw&pYԵ؉M?ﻢTn ,`RSl5Mfx7]»f܄91g4Y3RE^BzS5FOW?^팫RiMi>{5~<%Ձdsp8!bkh8&N( ).z jBuNdrDB76]+[6럷Y}}cwJ^/Ks^ͼY@˧WTdbCZ5ŖS߄E{\t$x"kT2UrM3H)+OXTjG7H)O)cB@{mW|Z1hZ)Kл!` tp ߕp5|P}df렵dlWWEoѷz nnӕY]PY"p{w|gmK+Lm}fȶŭ֏SRӝZAn z[/լ:|JƭLpZk?zTamxJOf" A!ou+qkl=kz 8ZxL=$oU<_#xݶ`CN=!ЌS4[ܮC Ӓx7?>QQIc#p dFjkb{\}(1%.s%ۨ/Rea%%(쐊J%XUaRI&D"tiGΔk.VmE@+dRȐnm+iU)4Sb DM%eRCĿV%K*&ZUm:;}05 Xb}`Jόf}q0}q+ѭefFKEұ:R&V+1 EJv!qfAkAe}"E$渜In׮=-|}cϿZ~3F7?}^/W}^q8>-RZ_~2__a[k>Lr˥է/pf͗(,οop==΂ujƿnz%GKQǓVzMOm%qZE/N"ng+Hac!wKkGd@W()e{h책= rb"9-CؽYj53ktd6 仇0{ZϤzGbk[F7KoآvUyuhf=tzEk|O,"͂V/엨.Gz#/J[ZJxsn\ѵ6FFGK;g1>'Ÿ޿cgmFm+fI'O7dwOppyÉDW?=ߞs 'x2Dh'xh<փ-E>Xkz`O~hC'W&J?Bp۵9'1j p;޾k?>Ř23ewl3fb*FD/zB^);nޅ6T}Lgwlݞj:uѢg=MtMyzՎne+1j'Xz|m&MMG0S3 ~ΛA fd͘܈7AwɅ'ΎX^WYH\訊c[g\s@Q"$2dڻky)?kӉf0lwttԡBf녖AJKX#1$q6?u(r4h,Z3@F02wnFzZQ[2Tو6 b[ ђWNCƏY kJH'}8BYX "{Y)fƽ;.6:) <OOߋwo}Ӈ3hh`3$]R4[wmO~X id?9 j3Qv[F*NPD檀 U#E0%bC{Gis} QRN:D ,įV9Io4ޭFiec=H 6dOX^*-;6)";mٮ 2[WY3qi_g[L(؁V T|6#ؕm)JjCݻ}jBN.l8xűi|\8>rn]$A%84m:#')pKޚ+ \VHC>>X,NO₧Vp9;EAǥ X֘t_^k kvJDsMm]>jv#8S瀚/'!"ž"6Iz5qmKH}LH waE@ o;~yolأoŢ(Ff*ObW| *qupmcm")qY{2~8?d(_[0c͉/߈:́0.d[A):+)nԻ@7.OlM=.[g҇:yíUb|N23>վb͔ӐRWX ?rv=vgw2-#l>u]I| ,4T}P9~>re{ܡi4 HdY}d$X$VpxvEk˞IgU+1qsI[3m}H\%.spޙPŠ´= c*JZEfoņb^ILn 9KŐ@D)!!5Q̉s/y/Ue~ ~Țjv ~{ ~22E"Ҹd)NXzWH]r-8^YcgBJ)~2˴5g>*ƽf\,.ʍ]?h1L)PVx^0amy wu+q__Շ5ku_:DC InȑƼf+q ^78ߍ+fLa4\^s|$QbC $;B70D^e $ܶto[oN.onzknG!؅(Q@e[a%ϙFT^Q+%e@ sȬ##reJYQlsi5Ʌ<8DqBU@K@`V8eZW8jJh74Rj'@FhpZ[I W "+ Q^aNK*PQX<"F7иIBzI'0S0 ; YP;\4^\dWTl ><죟 g)^T?Q_l rEd,N@U~RGv@#cMۿ߲kX|fKS"CI 6U~u0'j0_ȷrzhZ1 ,cRʔ5hkƣ˿lߎn@Ur]*3{wVt1,}zЦ#?}ɋ[^] PPeI`Wr)7i`'}; {;rJv~6fUfM{ g`3BFl )Rʙo70J+tFlfkQl7W/l:hz@Cz>vC2EELk0q4ڔcMy}TξOו\ϡEqE_+y&RC2Dفeu~fǣJa%!<`>zbRXe_ACR_nI8K]Y1d˱g6n[?\Mnm!,`ۀEx_!'L-nk*]2a=ucEGA;Oˏ Ng'b*L~ E(^:l- SRg/gULXrC&=0>|薊A뤾It;`"0.; DF.<䙻hkT 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000005452476115137231723017717 0ustar rootroot
Jan 30 21:16:49 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 30 21:16:49 crc restorecon[4696]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 30
21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 30 21:16:49 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc 
restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc 
restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 
Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c318,c553 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc 
restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
Jan 30 21:16:50 crc restorecon[4696]: [condensed: a long run of near-identical records, all timestamped Jan 30 21:16:50 and emitted by restorecon[4696], each reporting a path "not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13". Every path lies under /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/, and for each operator named below both the directory and its catalog.json are reported. Edge cases: for 3scale-community-operator only the catalog.json record falls inside this excerpt, and for keda only the directory record appears before the excerpt breaks off mid-timestamp of the next record. Operators, in log order:]
    3scale-community-operator, ack-acm-controller, ack-acmpca-controller, ack-apigateway-controller, ack-apigatewayv2-controller, ack-applicationautoscaling-controller,
    ack-athena-controller, ack-cloudfront-controller, ack-cloudtrail-controller, ack-cloudwatch-controller, ack-cloudwatchlogs-controller, ack-documentdb-controller,
    ack-dynamodb-controller, ack-ec2-controller, ack-ecr-controller, ack-ecs-controller, ack-efs-controller, ack-eks-controller,
    ack-elasticache-controller, ack-elbv2-controller, ack-emrcontainers-controller, ack-eventbridge-controller, ack-iam-controller, ack-kafka-controller,
    ack-keyspaces-controller, ack-kinesis-controller, ack-kms-controller, ack-lambda-controller, ack-memorydb-controller, ack-mq-controller,
    ack-networkfirewall-controller, ack-opensearchservice-controller, ack-organizations-controller, ack-pipes-controller, ack-prometheusservice-controller, ack-rds-controller,
    ack-recyclebin-controller, ack-route53-controller, ack-route53resolver-controller, ack-s3-controller, ack-sagemaker-controller, ack-secretsmanager-controller,
    ack-ses-controller, ack-sfn-controller, ack-sns-controller, ack-sqs-controller, ack-ssm-controller, ack-wafv2-controller,
    aerospike-kubernetes-operator, airflow-helm-operator, alloydb-omni-operator, alvearie-imaging-ingestion, amd-gpu-operator, analytics-operator,
    annotationlab, apicast-community-operator, apicurio-api-controller, apicurio-registry, apicurito, apimatic-kubernetes-operator,
    application-services-metering-operator, aqua, argocd-operator, assisted-service-operator, authorino-operator, automotive-infra,
    aws-efs-operator, awss3-operator-registry, azure-service-operator, beegfs-csi-driver-operator, bpfman-operator, camel-k,
    camel-karavan-operator, cass-operator-community, cert-manager, cert-utils-operator, cluster-aas-operator, cluster-impairment-operator,
    cluster-manager, cockroachdb, codeflare-operator, community-kubevirt-hyperconverged, community-trivy-operator, community-windows-machine-config-operator,
    customized-user-remediation, cxl-operator, dapr-kubernetes-operator, datadog-operator, datatrucker-operator, dbaas-operator,
    debezium-operator, dell-csm-operator, deployment-validation-operator, devopsinabox, dns-operator, dynatrace-operator,
    eclipse-amlen-operator, eclipse-che, ecr-secret-operator, edp-keycloak-operator, eginnovations-operator, egressip-ipam-operator,
    ember-csi-community-operator, etcd, eventing-kogito, external-secrets-operator, falcon-operator, fence-agents-remediation,
    flink-kubernetes-operator, flux, k8gb, fossul-operator, github-arc-operator, gitops-primer,
    gitwebhook-operator, global-load-balancer-operator, grafana-operator, group-sync-operator, hawtio-operator, hazelcast-platform-operator,
    hedvig-operator, hive-operator, horreum-operator, hyperfoil-bundle, ibm-block-csi-operator-community, ibm-security-verify-access-operator,
    ibm-spectrum-scale-csi-operator, ibmcloud-operator, infinispan, integrity-shield-operator, ipfs-operator, istio-workspace-operator,
    jaeger, kaoto-operator, keda
21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc 
restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 30 21:16:50 crc restorecon[4696]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 30 21:16:51 crc kubenswrapper[4721]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 30 21:16:51 crc kubenswrapper[4721]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 30 21:16:51 crc kubenswrapper[4721]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 30 21:16:51 crc kubenswrapper[4721]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 30 21:16:51 crc kubenswrapper[4721]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 30 21:16:51 crc kubenswrapper[4721]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.733745 4721 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747031 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747064 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747074 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747084 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747093 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747101 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747110 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747122 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747150 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747159 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747168 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747176 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747185 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747193 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747200 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747209 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747217 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747226 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747234 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747243 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747251 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747259 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747267 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747275 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747282 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747290 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747336 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747344 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747352 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747359 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747367 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747374 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747382 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747390 4721 feature_gate.go:330] unrecognized feature gate: 
OpenShiftPodSecurityAdmission Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747398 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747405 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747413 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747421 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747431 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747440 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747452 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747460 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747467 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747480 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747510 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747519 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747527 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747535 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747543 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747552 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747560 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747594 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747603 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747613 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747621 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747630 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747637 4721 feature_gate.go:330] unrecognized feature gate: Example Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747645 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747653 4721 feature_gate.go:330] unrecognized feature 
gate: BareMetalLoadBalancer Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747660 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747668 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747679 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747688 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747697 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747707 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747716 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747724 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747736 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747744 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747752 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.747774 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.747970 4721 flags.go:64] FLAG: --address="0.0.0.0" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.747988 4721 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748024 4721 flags.go:64] FLAG: --anonymous-auth="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748036 4721 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748067 4721 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748077 4721 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748089 4721 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748100 4721 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748110 4721 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748122 4721 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748132 4721 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748141 4721 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748150 4721 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748159 4721 flags.go:64] FLAG: --cgroup-root="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748168 4721 flags.go:64] FLAG: 
--cgroups-per-qos="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748178 4721 flags.go:64] FLAG: --client-ca-file="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748187 4721 flags.go:64] FLAG: --cloud-config="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748196 4721 flags.go:64] FLAG: --cloud-provider="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748204 4721 flags.go:64] FLAG: --cluster-dns="[]" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748222 4721 flags.go:64] FLAG: --cluster-domain="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748231 4721 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748240 4721 flags.go:64] FLAG: --config-dir="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748249 4721 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748259 4721 flags.go:64] FLAG: --container-log-max-files="5" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748269 4721 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748279 4721 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748288 4721 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748328 4721 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748338 4721 flags.go:64] FLAG: --contention-profiling="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748347 4721 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748356 4721 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748366 4721 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748375 4721 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748385 4721 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748394 4721 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748403 4721 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748416 4721 flags.go:64] FLAG: --enable-load-reader="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748425 4721 flags.go:64] FLAG: --enable-server="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748434 4721 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748447 4721 flags.go:64] FLAG: --event-burst="100" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748456 4721 flags.go:64] FLAG: --event-qps="50" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748501 4721 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748511 4721 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748521 4721 flags.go:64] FLAG: --eviction-hard="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748532 4721 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" 
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748544 4721 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748553 4721 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748563 4721 flags.go:64] FLAG: --eviction-soft="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748572 4721 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748582 4721 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748591 4721 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748601 4721 flags.go:64] FLAG: --experimental-mounter-path="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748610 4721 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748620 4721 flags.go:64] FLAG: --fail-swap-on="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748629 4721 flags.go:64] FLAG: --feature-gates="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748640 4721 flags.go:64] FLAG: --file-check-frequency="20s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748650 4721 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748659 4721 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748668 4721 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748678 4721 flags.go:64] FLAG: --healthz-port="10248" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748687 4721 flags.go:64] FLAG: --help="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748696 4721 flags.go:64] FLAG: --hostname-override="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748704 4721 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748714 4721 flags.go:64] FLAG: --http-check-frequency="20s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748723 4721 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748732 4721 flags.go:64] FLAG: --image-credential-provider-config="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748741 4721 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748750 4721 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748763 4721 flags.go:64] FLAG: --image-service-endpoint="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748772 4721 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748781 4721 flags.go:64] FLAG: --kube-api-burst="100" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748790 4721 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748800 4721 flags.go:64] FLAG: --kube-api-qps="50" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748809 4721 flags.go:64] FLAG: --kube-reserved="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748818 4721 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 30 21:16:51 crc 
kubenswrapper[4721]: I0130 21:16:51.748826 4721 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748836 4721 flags.go:64] FLAG: --kubelet-cgroups="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748845 4721 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748855 4721 flags.go:64] FLAG: --lock-file="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748863 4721 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748872 4721 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748882 4721 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748895 4721 flags.go:64] FLAG: --log-json-split-stream="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748905 4721 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748913 4721 flags.go:64] FLAG: --log-text-split-stream="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748922 4721 flags.go:64] FLAG: --logging-format="text" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748931 4721 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748941 4721 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748950 4721 flags.go:64] FLAG: --manifest-url="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748958 4721 flags.go:64] FLAG: --manifest-url-header="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748970 4721 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.748979 4721 flags.go:64] FLAG: --max-open-files="1000000" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749000 4721 flags.go:64] FLAG: --max-pods="110" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749009 4721 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749019 4721 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749027 4721 flags.go:64] FLAG: --memory-manager-policy="None" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749036 4721 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749045 4721 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749055 4721 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749064 4721 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749090 4721 flags.go:64] FLAG: --node-status-max-images="50" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749099 4721 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749109 4721 flags.go:64] FLAG: --oom-score-adj="-999" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749118 4721 flags.go:64] FLAG: --pod-cidr="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749127 4721 
flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749152 4721 flags.go:64] FLAG: --pod-manifest-path="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749161 4721 flags.go:64] FLAG: --pod-max-pids="-1" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749176 4721 flags.go:64] FLAG: --pods-per-core="0" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749185 4721 flags.go:64] FLAG: --port="10250" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749194 4721 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749204 4721 flags.go:64] FLAG: --provider-id="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749213 4721 flags.go:64] FLAG: --qos-reserved="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749223 4721 flags.go:64] FLAG: --read-only-port="10255" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749233 4721 flags.go:64] FLAG: --register-node="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749242 4721 flags.go:64] FLAG: --register-schedulable="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749250 4721 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749272 4721 flags.go:64] FLAG: --registry-burst="10" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749282 4721 flags.go:64] FLAG: --registry-qps="5" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749291 4721 flags.go:64] FLAG: --reserved-cpus="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749326 4721 flags.go:64] FLAG: --reserved-memory="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749338 4721 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749346 4721 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749355 4721 flags.go:64] FLAG: --rotate-certificates="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749364 4721 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749373 4721 flags.go:64] FLAG: --runonce="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749382 4721 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749391 4721 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749400 4721 flags.go:64] FLAG: --seccomp-default="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749409 4721 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749418 4721 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749427 4721 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749436 4721 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749449 4721 flags.go:64] FLAG: --storage-driver-password="root" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749460 4721 flags.go:64] FLAG: --storage-driver-secure="false" Jan 30 21:16:51 crc 
kubenswrapper[4721]: I0130 21:16:51.749470 4721 flags.go:64] FLAG: --storage-driver-table="stats" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749479 4721 flags.go:64] FLAG: --storage-driver-user="root" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749487 4721 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749497 4721 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749506 4721 flags.go:64] FLAG: --system-cgroups="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749514 4721 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749591 4721 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749603 4721 flags.go:64] FLAG: --tls-cert-file="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749612 4721 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749623 4721 flags.go:64] FLAG: --tls-min-version="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749632 4721 flags.go:64] FLAG: --tls-private-key-file="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749641 4721 flags.go:64] FLAG: --topology-manager-policy="none" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749650 4721 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749660 4721 flags.go:64] FLAG: --topology-manager-scope="container" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749669 4721 flags.go:64] FLAG: --v="2" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749682 4721 flags.go:64] FLAG: --version="false" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749697 4721 flags.go:64] FLAG: --vmodule="" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749709 4721 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.749721 4721 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750086 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750106 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750119 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750127 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750135 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750144 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750154 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750165 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750175 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750183 4721 feature_gate.go:330] unrecognized feature gate: 
NetworkSegmentation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750191 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750205 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750215 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750225 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750232 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750240 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750248 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750256 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750264 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750272 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750280 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750288 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750323 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750332 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750339 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750347 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750355 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750362 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750371 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750379 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750387 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750395 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750403 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750411 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750419 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750426 4721 feature_gate.go:330] unrecognized 
feature gate: AWSClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750435 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750443 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750451 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750460 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750470 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750480 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750489 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750501 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750511 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750521 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750531 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750540 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750549 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750559 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750567 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750576 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750587 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750599 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750609 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750618 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750627 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750636 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750645 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750653 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750661 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750668 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750676 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750683 4721 feature_gate.go:330] unrecognized feature gate: Example Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750692 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750700 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750707 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750715 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750723 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750730 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.750738 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.750750 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.766062 4721 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.766134 4721 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766250 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766263 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766271 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766276 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766283 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766290 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766327 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766336 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766343 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766350 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766357 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766364 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766370 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766377 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766383 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766389 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766396 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766402 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766408 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766413 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766419 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766425 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766433 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766443 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766449 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766457 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766463 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766470 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766475 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766480 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766485 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766491 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766496 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766501 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766506 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766511 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766516 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766521 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766526 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766531 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766536 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766541 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766546 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766553 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766559 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766565 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766570 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766576 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766581 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766586 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766591 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766597 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766603 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766610 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766616 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766622 4721 feature_gate.go:330] unrecognized feature gate: Example Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766628 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766635 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766640 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766647 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766653 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766658 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766664 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766669 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766674 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766679 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766684 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766689 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766694 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766699 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766704 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.766713 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766880 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766894 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766901 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766908 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766914 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766922 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766929 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766937 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 
30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766943 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766950 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766959 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766964 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766972 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766978 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766985 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.766993 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767001 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767007 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767013 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767020 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767027 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767033 4721 feature_gate.go:330] unrecognized feature gate: Example Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767039 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767045 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767051 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767058 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767064 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767071 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767077 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767083 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767089 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767095 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767101 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767107 
4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767113 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767120 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767126 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767135 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767141 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767182 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767191 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767198 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767206 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767213 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767221 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767368 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767376 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767381 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767387 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767392 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767397 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767405 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767411 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767416 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767421 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767426 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767432 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767437 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767442 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767447 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767452 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767458 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767465 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767470 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767476 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767482 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767487 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767492 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767497 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767503 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.767509 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.767517 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.768735 4721 server.go:940] "Client rotation is on, will bootstrap in background" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.776920 4721 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.777453 4721 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.782460 4721 server.go:997] "Starting client certificate rotation" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.782519 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.785506 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-18 13:36:18.216701529 +0000 UTC Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.785740 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.816692 4721 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 30 21:16:51 crc kubenswrapper[4721]: E0130 21:16:51.822720 4721 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.823334 4721 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.850712 4721 log.go:25] "Validated CRI v1 runtime API" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.929057 4721 log.go:25] "Validated CRI v1 image API" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.931792 4721 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.939564 4721 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-30-21-12-23-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.939612 4721 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 
fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.971448 4721 manager.go:217] Machine: {Timestamp:2026-01-30 21:16:51.966506384 +0000 UTC m=+0.758407670 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:dce14e0b-51ff-48a1-84fb-60746c76c1b3 BootID:d55cd7e2-7ca0-4ee4-9f64-b636d350d409 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e9:ca:06 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e9:ca:06 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:76:1a:09 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:db:ee:0d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:fd:00:f8 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ee:c1:77 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:8e:f1:96:85:33:0e Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ea:73:22:f7:c7:d1 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 
Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.971734 4721 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.972060 4721 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.972550 4721 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.972805 4721 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.972847 4721 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.973126 4721 topology_manager.go:138] "Creating topology manager with none policy" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.973144 4721 container_manager_linux.go:303] "Creating device plugin manager" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.973880 4721 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.973923 4721 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.974986 4721 state_mem.go:36] "Initialized new in-memory state store" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.975110 4721 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.983355 4721 kubelet.go:418] "Attempting to sync node with API server" Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.983403 4721 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.983443 4721 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.983466 4721 kubelet.go:324] "Adding apiserver pod source"
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.983486 4721 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.989513 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused
Jan 30 21:16:51 crc kubenswrapper[4721]: E0130 21:16:51.989603 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError"
Jan 30 21:16:51 crc kubenswrapper[4721]: W0130 21:16:51.989570 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused
Jan 30 21:16:51 crc kubenswrapper[4721]: E0130 21:16:51.989740 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError"
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.990988 4721 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.992483 4721 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
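The Node and Service reflector failures above share one root cause: nothing is accepting TCP connections on api-int.crc.testing:6443 yet, so every list/watch fails with connection refused and client-go retries with backoff. A quick stand-alone check of that condition from the node, as a sketch (stdlib only; the endpoint string is copied verbatim from the log):

// dial_probe.go - a minimal sketch that distinguishes a refused TCP connect
// (apiserver process not up yet) from DNS or routing failures.
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	conn, err := net.DialTimeout("tcp", "api-int.crc.testing:6443", 3*time.Second)
	if err != nil {
		// e.g. dial tcp 38.102.83.20:6443: connect: connection refused
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	fmt.Println("TCP connect succeeded:", conn.RemoteAddr())
}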
Jan 30 21:16:51 crc kubenswrapper[4721]: I0130 21:16:51.996179 4721 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001663 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001714 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001735 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001756 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001788 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001807 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001826 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001855 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001878 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001897 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001939 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.001960 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.004473 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.005657 4721 server.go:1280] "Started kubelet"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.005641 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.007598 4721 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.007593 4721 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.008640 4721 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Jan 30 21:16:52 crc systemd[1]: Started Kubernetes Kubelet.
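At this point the kubelet is serving: the main server on 0.0.0.0:10250 and the podresources API on a unix socket, rate-limited at qps=100 with 10 burst tokens. A sketch for verifying that the unix listener logged above is actually accepting connections (it does not speak the gRPC protocol, it only probes the listener; the socket path is copied from the log, and root privileges on the node are assumed):

// podresources_probe.go - a minimal sketch, run on the node itself.
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	const sock = "/var/lib/kubelet/pod-resources/kubelet.sock" // from the log
	conn, err := net.DialTimeout("unix", sock, time.Second)
	if err != nil {
		fmt.Println("podresources socket not reachable:", err)
		return
	}
	conn.Close()
	fmt.Println("podresources socket is accepting connections")
}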
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.010493 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.010547 4721 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.010719 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 19:34:05.071853994 +0000 UTC Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.010799 4721 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.010918 4721 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.010939 4721 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.011201 4721 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.011841 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.011906 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.012730 4721 server.go:460] "Adding debug handlers to kubelet server" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.012766 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="200ms" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.013563 4721 factory.go:55] Registering systemd factory Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.013590 4721 factory.go:221] Registration of the systemd container factory successfully Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.018524 4721 factory.go:153] Registering CRI-O factory Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.018747 4721 factory.go:221] Registration of the crio container factory successfully Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.018990 4721 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.019147 4721 factory.go:103] Registering Raw factory Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.019281 4721 manager.go:1196] Started watching for new ooms in manager Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.020986 4721 manager.go:319] Starting recovery of all containers Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.023487 4721 event.go:368] "Unable to write event 
(may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.20:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188f9ed4034d9797 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 21:16:52.005566359 +0000 UTC m=+0.797467665,LastTimestamp:2026-01-30 21:16:52.005566359 +0000 UTC m=+0.797467665,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.038992 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.039380 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.039573 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.039734 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.039878 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.040070 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.040253 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.040490 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.040672 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.040826 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.040978 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.041107 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.041266 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.041518 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.041696 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.041863 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042057 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042220 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042406 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042550 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042688 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042816 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.042965 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.043101 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.043244 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.043420 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.043593 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.043769 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.043944 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.044138 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.044275 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.044496 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.044688 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.044851 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.044984 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.045241 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.045411 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.045556 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.045727 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.045890 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.046030 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.046206 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.046372 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.046515 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.047928 4721 manager.go:324] Recovery completed Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049354 4721 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049426 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049453 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049476 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049505 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049529 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049548 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049569 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" 
seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049589 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049624 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049658 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049688 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049711 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049735 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049761 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049790 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049814 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049834 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049863 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 30 21:16:52 crc 
kubenswrapper[4721]: I0130 21:16:52.049888 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049915 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049942 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049970 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.049994 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050018 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050042 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050068 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050093 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050119 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050145 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050169 4721 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050196 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050228 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050255 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050280 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050343 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050375 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050402 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050429 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050453 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050482 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050534 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050560 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050592 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050617 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050646 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050672 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050702 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050731 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050757 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050783 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050809 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050837 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050879 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050906 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050934 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050960 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.050988 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051014 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051043 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051073 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051110 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051142 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051172 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051204 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051235 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051266 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051330 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051367 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051398 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051427 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051454 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051480 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051508 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051534 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051562 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051589 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051613 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051640 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051662 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051683 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051702 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051721 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051741 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051763 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051782 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051801 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051820 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051838 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051858 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051877 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051899 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051923 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051949 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051973 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.051992 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052036 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" 
seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052057 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052078 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052098 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052117 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052137 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052156 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052177 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052197 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052217 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052236 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052256 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" 
seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052277 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052324 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052346 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052366 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052387 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052406 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052428 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052446 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052469 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052489 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052509 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 30 21:16:52 
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052528 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052549 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052568 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052587 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052608 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052628 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052649 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052670 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052692 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052718 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052741 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052767 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052792 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052812 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052836 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052864 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052894 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052919 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052938 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052962 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.052986 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053011 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053037 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053059 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053082 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053113 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053139 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053164 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053190 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053220 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053246 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053271 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053333 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053364 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053388 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053415 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053441 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053467 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053495 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053522 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053550 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053577 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053604 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053629 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053656 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053682 4721 reconstruct.go:97] "Volume reconstruction finished"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.053701 4721 reconciler.go:26] "Reconciler: start to sync state"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.061791 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.064384 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.064447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.064466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.065836 4721 cpu_manager.go:225] "Starting CPU manager" policy="none"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.065864 4721 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.066022 4721 state_mem.go:36] "Initialized new in-memory state store"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.082196 4721 policy_none.go:49] "None policy: Start"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.083460 4721 memory_manager.go:170] "Starting memorymanager" policy="None"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.083522 4721 state_mem.go:35] "Initializing new in-memory state store"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.087440 4721 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4"
Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.090811 4721 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6"
protocol="IPv6" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.090896 4721 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.090960 4721 kubelet.go:2335] "Starting kubelet main sync loop" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.091048 4721 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.094806 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.094919 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.111284 4721 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.144613 4721 manager.go:334] "Starting Device Plugin manager" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.144685 4721 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.144699 4721 server.go:79] "Starting device plugin registration server" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.145206 4721 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.145225 4721 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.145416 4721 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.145496 4721 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.145504 4721 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.156435 4721 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.191906 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.192016 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.196326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.196376 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.196389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.196558 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.197011 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.197133 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.197666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.197703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.197719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.197827 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.198026 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.198113 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199171 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199513 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199574 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.199623 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201028 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201388 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201406 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.201513 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202432 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202833 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.202885 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.203819 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.203868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.203887 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.213820 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="400ms" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.245538 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.246868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.246941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.246974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.247010 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.247737 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.20:6443: connect: connection refused" node="crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256050 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256204 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256264 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256366 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256424 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256468 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256524 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256638 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256660 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256701 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256718 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256734 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256771 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.256789 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357421 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357487 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357525 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357545 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357563 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357579 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357611 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357630 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357651 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357659 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357733 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357857 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357882 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357784 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357795 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357762 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357819 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357980 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.357801 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358050 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358015 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358118 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358145 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358164 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc 
kubenswrapper[4721]: I0130 21:16:52.358186 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358249 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.358255 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.448378 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.449992 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.450045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.450063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.450096 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.450728 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.20:6443: connect: connection refused" node="crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.525791 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.540170 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.566023 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.585422 4721 util.go:30] "No sandbox for pod can be found. 
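[Editor's note] The volume lines above always advance in the same order per volume: VerifyControllerAttachedVolume started, then MountVolume started, then MountVolume.SetUp succeeded. Below is a toy reconciler that mirrors that progression for one of the host-path volumes in this log; the names and structure are illustrative only, not the kubelet's operationexecutor API.

    // Generic reconciler sketch mirroring the message progression above.
    package main

    import "fmt"

    type volume struct{ uniqueName, pod string }

    // reconcile drives each desired volume through the three stages seen in
    // the log until the actual state (mounted) matches the desired state.
    func reconcile(desired []volume, mounted map[string]bool) {
    	for _, v := range desired {
    		if mounted[v.uniqueName] {
    			continue // actual state already matches desired state
    		}
    		fmt.Printf("VerifyControllerAttachedVolume started for %q pod %q\n", v.uniqueName, v.pod)
    		fmt.Printf("MountVolume started for %q\n", v.uniqueName)
    		// host-path volumes need no attach step; SetUp is effectively a path check
    		mounted[v.uniqueName] = true
    		fmt.Printf("MountVolume.SetUp succeeded for %q\n", v.uniqueName)
    	}
    }

    func main() {
    	reconcile([]volume{
    		{"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir", "openshift-etcd/etcd-crc"},
    	}, map[string]bool{})
    }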
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.591472 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-4eb24a6648b7fb1624b59b701607a7668b79fc992c77c4a69c3a0ded03f4839c WatchSource:0}: Error finding container 4eb24a6648b7fb1624b59b701607a7668b79fc992c77c4a69c3a0ded03f4839c: Status 404 returned error can't find the container with id 4eb24a6648b7fb1624b59b701607a7668b79fc992c77c4a69c3a0ded03f4839c Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.592392 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.592639 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a7ebfbb35c339610a6517919a37cf5aaa93348064d542e2ebec96ad884555c84 WatchSource:0}: Error finding container a7ebfbb35c339610a6517919a37cf5aaa93348064d542e2ebec96ad884555c84: Status 404 returned error can't find the container with id a7ebfbb35c339610a6517919a37cf5aaa93348064d542e2ebec96ad884555c84 Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.603514 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-a7b90e16fdead012c1f0fb810a7a5f5e9d0b1af0d9f9f17d84ca038591f0d139 WatchSource:0}: Error finding container a7b90e16fdead012c1f0fb810a7a5f5e9d0b1af0d9f9f17d84ca038591f0d139: Status 404 returned error can't find the container with id a7b90e16fdead012c1f0fb810a7a5f5e9d0b1af0d9f9f17d84ca038591f0d139 Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.608273 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-7a673a7709dd9406b34f31e3497a9c3fb758d354ef18315c39392df9b30e0fa1 WatchSource:0}: Error finding container 7a673a7709dd9406b34f31e3497a9c3fb758d354ef18315c39392df9b30e0fa1: Status 404 returned error can't find the container with id 7a673a7709dd9406b34f31e3497a9c3fb758d354ef18315c39392df9b30e0fa1 Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.613762 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-cc907f73e91957ec54cf961f00df103a474e7e0dcc6c39b0fbcc4607abd620e9 WatchSource:0}: Error finding container cc907f73e91957ec54cf961f00df103a474e7e0dcc6c39b0fbcc4607abd620e9: Status 404 returned error can't find the container with id cc907f73e91957ec54cf961f00df103a474e7e0dcc6c39b0fbcc4607abd620e9 Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.615028 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="800ms" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.851452 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.853208 4721 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.853242 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.853252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:52 crc kubenswrapper[4721]: I0130 21:16:52.853271 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.853737 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.20:6443: connect: connection refused" node="crc" Jan 30 21:16:52 crc kubenswrapper[4721]: W0130 21:16:52.969103 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:52 crc kubenswrapper[4721]: E0130 21:16:52.969235 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.006965 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.010982 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 18:10:13.308766039 +0000 UTC Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.098349 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a7b90e16fdead012c1f0fb810a7a5f5e9d0b1af0d9f9f17d84ca038591f0d139"} Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.100249 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a7ebfbb35c339610a6517919a37cf5aaa93348064d542e2ebec96ad884555c84"} Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.101963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4eb24a6648b7fb1624b59b701607a7668b79fc992c77c4a69c3a0ded03f4839c"} Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.103056 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cc907f73e91957ec54cf961f00df103a474e7e0dcc6c39b0fbcc4607abd620e9"} Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.104348 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7a673a7709dd9406b34f31e3497a9c3fb758d354ef18315c39392df9b30e0fa1"} Jan 30 21:16:53 crc kubenswrapper[4721]: E0130 21:16:53.416984 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="1.6s" Jan 30 21:16:53 crc kubenswrapper[4721]: W0130 21:16:53.493763 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:53 crc kubenswrapper[4721]: E0130 21:16:53.493906 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:53 crc kubenswrapper[4721]: W0130 21:16:53.521519 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:53 crc kubenswrapper[4721]: E0130 21:16:53.521672 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.654492 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.657279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.657400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.657421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.657510 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:16:53 crc kubenswrapper[4721]: E0130 21:16:53.658471 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.20:6443: connect: connection refused" node="crc" Jan 30 21:16:53 crc kubenswrapper[4721]: W0130 21:16:53.659252 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:53 crc kubenswrapper[4721]: E0130 21:16:53.659384 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list 
*v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:53 crc kubenswrapper[4721]: I0130 21:16:53.894988 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 30 21:16:53 crc kubenswrapper[4721]: E0130 21:16:53.897531 4721 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.007281 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.011402 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 14:09:56.670705007 +0000 UTC Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.112114 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.112203 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.112227 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.114147 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e" exitCode=0 Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.114276 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.114369 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.115791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.115838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.115859 4721 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.116598 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="5af3cc5328494b22f5b9c088d552a32660fd1f88a82ff15c62351a7f2fb989d1" exitCode=0 Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.116755 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.116854 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"5af3cc5328494b22f5b9c088d552a32660fd1f88a82ff15c62351a7f2fb989d1"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.117852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.117902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.117918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.118489 4721 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66" exitCode=0 Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.118560 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.118574 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.119663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.119689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.119698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.120848 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e" exitCode=0 Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.120886 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e"} Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.120942 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.122243 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.122284 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.122317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.127075 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.128169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.128206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:54 crc kubenswrapper[4721]: I0130 21:16:54.128218 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:54 crc kubenswrapper[4721]: W0130 21:16:54.970835 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:54 crc kubenswrapper[4721]: E0130 21:16:54.971240 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.007536 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.012074 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 14:25:07.017224363 +0000 UTC Jan 30 21:16:55 crc kubenswrapper[4721]: E0130 21:16:55.018342 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="3.2s" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.126479 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2aeb4c8eb7a6438319ea7064767a1d12873fb564a054b889148fdaa48288919a"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.126552 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.127484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.127517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.127528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.129828 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.129852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.129863 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.130008 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.131759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.131811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.131828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.134183 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.134241 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.134251 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.134260 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.140279 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.140339 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.141194 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.141227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.141240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.143910 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b" exitCode=0 Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.143962 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b"} Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.144014 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.150825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.150854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.150867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.258661 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.260842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.260882 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.260895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.260922 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:16:55 crc kubenswrapper[4721]: E0130 21:16:55.261593 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.20:6443: connect: connection refused" node="crc" Jan 30 21:16:55 crc kubenswrapper[4721]: W0130 21:16:55.362909 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:55 crc kubenswrapper[4721]: E0130 21:16:55.363001 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:55 crc kubenswrapper[4721]: I0130 21:16:55.705348 4721 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:16:55 crc kubenswrapper[4721]: W0130 21:16:55.811452 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.20:6443: connect: connection refused Jan 30 21:16:55 crc kubenswrapper[4721]: E0130 21:16:55.811591 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.20:6443: connect: connection refused" logger="UnhandledError" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.012410 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 18:02:10.205877809 +0000 UTC Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.151277 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584" exitCode=0 Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.151850 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.151378 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584"} Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.154941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.155058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.155104 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.159977 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668"} Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.160076 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.160117 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.160144 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.160169 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.160267 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.161333 4721 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.161371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.161382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.161597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.161631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.161641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.162245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.162356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.162377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.162256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.162431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:56 crc kubenswrapper[4721]: I0130 21:16:56.162442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.012567 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 20:32:42.000988595 +0000 UTC Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.167747 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e"} Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.167797 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807"} Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.167807 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69"} Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.167816 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff"} Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.167821 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:16:57 crc 
kubenswrapper[4721]: I0130 21:16:57.167891 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.167975 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.168975 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.169088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.169105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.169984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.170037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.170057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.512286 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:57 crc kubenswrapper[4721]: I0130 21:16:57.834657 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.013575 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 01:05:03.027728469 +0000 UTC Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.120725 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.178013 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a"} Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.178091 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.178137 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.179131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.179167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.179182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.179493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.179532 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.179556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.229117 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.320983 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.321293 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.322775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.322841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.322858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.462687 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.464382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.464453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.464467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.464572 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.705662 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 21:16:58 crc kubenswrapper[4721]: I0130 21:16:58.705793 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.014501 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 09:23:18.632551824 +0000 UTC Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.181063 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.181064 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.182205 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.182248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.182258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.182463 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.182503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.182528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:16:59 crc kubenswrapper[4721]: I0130 21:16:59.736731 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 30 21:17:00 crc kubenswrapper[4721]: I0130 21:17:00.015392 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 22:28:52.013203866 +0000 UTC Jan 30 21:17:00 crc kubenswrapper[4721]: I0130 21:17:00.183728 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:00 crc kubenswrapper[4721]: I0130 21:17:00.184970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:00 crc kubenswrapper[4721]: I0130 21:17:00.185023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:00 crc kubenswrapper[4721]: I0130 21:17:00.185039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:01 crc kubenswrapper[4721]: I0130 21:17:01.016528 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 10:11:06.910583674 +0000 UTC Jan 30 21:17:01 crc kubenswrapper[4721]: I0130 21:17:01.562388 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:01 crc kubenswrapper[4721]: I0130 21:17:01.562571 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:01 crc kubenswrapper[4721]: I0130 21:17:01.563714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:01 crc kubenswrapper[4721]: I0130 21:17:01.563757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:01 crc kubenswrapper[4721]: I0130 21:17:01.563773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.017615 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 20:12:41.280642643 +0000 UTC Jan 30 21:17:02 crc kubenswrapper[4721]: E0130 21:17:02.156592 4721 eviction_manager.go:285] "Eviction manager: failed to get 
summary stats" err="failed to get node info: node \"crc\" not found" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.539125 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.539253 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.540364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.540418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.540430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:02 crc kubenswrapper[4721]: I0130 21:17:02.547049 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.018729 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 15:28:03.310044907 +0000 UTC Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.194625 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.194953 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.195989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.196067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.196094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:03 crc kubenswrapper[4721]: I0130 21:17:03.202455 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:04 crc kubenswrapper[4721]: I0130 21:17:04.019634 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 13:57:37.161368368 +0000 UTC Jan 30 21:17:04 crc kubenswrapper[4721]: I0130 21:17:04.203544 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:04 crc kubenswrapper[4721]: I0130 21:17:04.205180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:04 crc kubenswrapper[4721]: I0130 21:17:04.205253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:04 crc kubenswrapper[4721]: I0130 21:17:04.205274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.019831 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: 
Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 07:31:21.962061193 +0000 UTC Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.206943 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.208447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.208517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.208538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.644050 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.644259 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.645633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.645662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.645672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:05 crc kubenswrapper[4721]: W0130 21:17:05.830648 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 30 21:17:05 crc kubenswrapper[4721]: I0130 21:17:05.830736 4721 trace.go:236] Trace[1785188060]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Jan-2026 21:16:55.829) (total time: 10001ms): Jan 30 21:17:05 crc kubenswrapper[4721]: Trace[1785188060]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:17:05.830) Jan 30 21:17:05 crc kubenswrapper[4721]: Trace[1785188060]: [10.001567413s] [10.001567413s] END Jan 30 21:17:05 crc kubenswrapper[4721]: E0130 21:17:05.830759 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 30 21:17:06 crc kubenswrapper[4721]: I0130 21:17:06.007940 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 30 21:17:06 crc kubenswrapper[4721]: I0130 21:17:06.020220 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 12:22:11.796984404 +0000 UTC Jan 30 21:17:06 crc kubenswrapper[4721]: I0130 21:17:06.774281 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc 
container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Jan 30 21:17:06 crc kubenswrapper[4721]: I0130 21:17:06.774368 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 30 21:17:06 crc kubenswrapper[4721]: I0130 21:17:06.780453 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Jan 30 21:17:06 crc kubenswrapper[4721]: I0130 21:17:06.780528 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.021017 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 04:50:32.730940974 +0000 UTC Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.214553 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.217242 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668" exitCode=255 Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.217280 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668"} Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.217440 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.218131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.218159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.218169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:07 crc 
kubenswrapper[4721]: I0130 21:17:07.218678 4721 scope.go:117] "RemoveContainer" containerID="7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668" Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.524459 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]log ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]etcd ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-startkubeinformers ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-api-request-count-filter ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/generic-apiserver-start-informers ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/priority-and-fairness-config-consumer ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/priority-and-fairness-filter ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-apiextensions-informers ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-apiextensions-controllers ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/crd-informer-synced ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-system-namespaces-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-cluster-authentication-info-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-legacy-token-tracking-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-service-ip-repair-controllers ok Jan 30 21:17:07 crc kubenswrapper[4721]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Jan 30 21:17:07 crc kubenswrapper[4721]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/priority-and-fairness-config-producer ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/bootstrap-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/start-kube-aggregator-informers ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-status-local-available-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-status-remote-available-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-registration-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-wait-for-first-sync ok Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-discovery-controller ok Jan 30 21:17:07 crc kubenswrapper[4721]: 
[+]poststarthook/kube-apiserver-autoregistration ok
Jan 30 21:17:07 crc kubenswrapper[4721]: [+]autoregister-completion ok
Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-openapi-controller ok
Jan 30 21:17:07 crc kubenswrapper[4721]: [+]poststarthook/apiservice-openapiv3-controller ok
Jan 30 21:17:07 crc kubenswrapper[4721]: livez check failed
Jan 30 21:17:07 crc kubenswrapper[4721]: I0130 21:17:07.524532 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.022133 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 23:44:16.613797955 +0000 UTC
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.221321 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.226152 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8"}
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.226288 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.227016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.227071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.227093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.705850 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 30 21:17:08 crc kubenswrapper[4721]: I0130 21:17:08.705942 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 30 21:17:09 crc kubenswrapper[4721]: I0130 21:17:09.022863 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 13:37:45.706070556 +0000 UTC
Jan 30 21:17:09 crc kubenswrapper[4721]: I0130 21:17:09.911510 4721 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 30 21:17:10 crc kubenswrapper[4721]: I0130 21:17:10.023331 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 08:37:25.31125059 +0000 UTC
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.024254 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 11:13:52.73681382 +0000 UTC
Jan 30 21:17:11 crc kubenswrapper[4721]: E0130 21:17:11.771129 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.775653 4721 trace.go:236] Trace[1247946745]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Jan-2026 21:17:00.246) (total time: 11529ms):
Jan 30 21:17:11 crc kubenswrapper[4721]: Trace[1247946745]: ---"Objects listed" error: 11529ms (21:17:11.775)
Jan 30 21:17:11 crc kubenswrapper[4721]: Trace[1247946745]: [11.529045103s] [11.529045103s] END
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.775682 4721 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 30 21:17:11 crc kubenswrapper[4721]: E0130 21:17:11.775831 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.776001 4721 trace.go:236] Trace[145790045]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Jan-2026 21:16:58.368) (total time: 13407ms):
Jan 30 21:17:11 crc kubenswrapper[4721]: Trace[145790045]: ---"Objects listed" error: 13407ms (21:17:11.775)
Jan 30 21:17:11 crc kubenswrapper[4721]: Trace[145790045]: [13.407554297s] [13.407554297s] END
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.776026 4721 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.776126 4721 trace.go:236] Trace[2045354799]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Jan-2026 21:17:01.027) (total time: 10748ms):
Jan 30 21:17:11 crc kubenswrapper[4721]: Trace[2045354799]: ---"Objects listed" error: 10748ms (21:17:11.775)
Jan 30 21:17:11 crc kubenswrapper[4721]: Trace[2045354799]: [10.748299009s] [10.748299009s] END
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.776148 4721 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.776649 4721 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.783232 4721 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.813549 4721 csr.go:261] certificate signing request csr-66htc is approved, waiting to be issued
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.829544 4721 csr.go:257] certificate signing request csr-66htc is issued
Jan 30 21:17:11 crc kubenswrapper[4721]: I0130 21:17:11.996719 4721 apiserver.go:52] "Watching apiserver"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.002931 4721 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.003634 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.004217 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.004332 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.004569 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.004816 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.004847 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.004898 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.004963 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.005203 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.005258 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.006362 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.006797 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.006880 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.007368 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.007373 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.007603 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.007806 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.008800 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.009541 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.011961 4721 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.024679 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 00:06:32.340468178 +0000 UTC
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.049620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.063557 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.074476 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079318 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079359 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079382 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079400 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079414 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079433 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079463 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.079886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080016 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080174 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080351 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080389 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080441 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080497 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080513 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080532 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080549 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080568 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080591 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080615 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080630 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080631 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080664 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080681 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080696 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080715 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080750 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080767 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080784 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080845 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080865 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080895 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080911 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080932 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080949 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080969 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080993 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081017 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081036 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081059 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081164 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081188 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081213 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081235 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081257 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081273 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081290 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081350 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081372 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081393 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081419 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081435 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081451 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081468 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081484 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081521 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081540 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081569 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081586 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081618 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081635 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.080797 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081228 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081435 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081613 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081701 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081914 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081967 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082066 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082120 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082210 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082524 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082416 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.082629 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:17:12.582565362 +0000 UTC m=+21.374466648 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082775 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.082815 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083015 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083094 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083128 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083165 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083240 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083403 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083430 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083481 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083664 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083676 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.081681 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083766 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083817 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083853 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083900 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083940 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083983 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083992 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.083990 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084026 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084068 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084075 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084099 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084104 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084150 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084186 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084195 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084229 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084238 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084263 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084284 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084323 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084342 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084360 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084377 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084397 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084413 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084429 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084445 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084466 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084483 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084502 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084518 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084536 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084551 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084551 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084568 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084647 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084689 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084725 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084764 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084803 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084841 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084877 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084914 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084953 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID:
\"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084992 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085033 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085072 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085108 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085147 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085184 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085223 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085260 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088683 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088740 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088775 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088800 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088826 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088850 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088870 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088903 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088926 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088947 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088969 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088989 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089011 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089034 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089063 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089088 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089107 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089131 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089155 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089176 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089199 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089220 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089243 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089261 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089283 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089380 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089403 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089426 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089444 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089467 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089489 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089511 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089530 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089551 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089574 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089671 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089697 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089720 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089746 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089768 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 21:17:12 crc 
kubenswrapper[4721]: I0130 21:17:12.089793 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089813 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089830 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089851 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089871 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089908 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089928 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089947 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089967 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 
30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089987 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090007 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090031 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090057 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090079 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090095 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090117 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090138 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090160 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090181 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 21:17:12 
crc kubenswrapper[4721]: I0130 21:17:12.090202 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090224 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090243 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090263 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090308 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090326 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090345 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090365 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090389 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090425 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090450 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090471 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090494 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090516 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090533 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090580 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090603 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090628 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090658 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod 
\"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090704 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090726 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090750 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090809 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090851 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090883 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090911 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090934 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091056 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091085 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091174 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091218 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091243 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091267 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091285 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091617 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091672 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091700 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091795 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091835 4721 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091852 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091865 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091877 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on 
node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091892 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091905 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091918 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091934 4721 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092286 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092380 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092410 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092435 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092482 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092512 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092542 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092580 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092605 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.092628 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.105863 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108685 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.109231 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085111 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.085205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.084508 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.087611 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.087715 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.087831 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.087864 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088439 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088493 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.088656 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089410 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089453 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.089462 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090081 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090384 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090612 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.090625 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.091107 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.097027 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.097025 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.097838 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.097940 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098113 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098354 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098393 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098595 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098776 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098922 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.109836 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.109962 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.109332 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.099273 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.099946 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.100228 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.100701 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.101037 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.101239 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.110604 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.102126 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.102233 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.102335 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.102518 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.102635 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.102791 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103267 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103487 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103501 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103638 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103785 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103783 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.110731 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.110764 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.103835 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104021 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104024 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.110862 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.110940 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104326 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104425 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104654 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104531 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104527 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104781 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104964 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.104821 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.105211 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.105263 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.105366 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.105672 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.105750 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.106219 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.106338 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.111346 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.111503 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.111523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.106370 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.106471 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.098766 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.106534 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.111527 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.106288 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.097660 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.106981 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.107602 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.107902 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.107915 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108316 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108250 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108335 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108342 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.107119 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108424 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108541 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108592 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108704 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108753 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108739 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108770 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.108993 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.109077 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.111536 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:12.611474596 +0000 UTC m=+21.403375882 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.112869 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.112948 4721 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.112971 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.112986 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113002 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113017 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113028 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113040 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113051 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113065 4721 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113075 4721 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113085 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" 
(UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113096 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113108 4721 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113120 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113130 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.113139 4721 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.114995 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.115100 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:12.615077891 +0000 UTC m=+21.406979137 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.115469 4721 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.118900 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.122430 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.122472 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.122973 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.123815 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.123819 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.124236 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.124573 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.125332 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.125344 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.125532 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.126080 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.127025 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.128484 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.128507 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.128524 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.128573 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.128590 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:12.628569593 +0000 UTC m=+21.420470849 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.128950 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.129171 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.130509 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.131131 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.131676 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.131700 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.131719 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.131803 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:12.631779665 +0000 UTC m=+21.423680921 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.132052 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.132760 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.132810 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.133441 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.134533 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.136211 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.136696 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.137539 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.138065 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.138415 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.138511 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.138519 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.138530 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.139640 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.139794 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.140022 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.141421 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.141995 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.142185 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.142351 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.142502 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.143081 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.143227 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.143409 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.143889 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.146526 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.146851 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.147428 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.147514 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.147585 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.147753 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.147845 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.148036 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.148510 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.150021 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.150862 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.151019 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.151046 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.152558 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.152601 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.153740 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.153890 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.154226 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.154693 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.154842 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.154878 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.155260 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.156083 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.156528 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.156537 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.157073 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.157118 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.159662 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.159692 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.159754 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.160049 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.168455 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.170243 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.172247 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.181219 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.183174 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.183997 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.184338 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.185157 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.185254 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.187189 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.189205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.189610 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.190673 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.193466 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.194454 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.195978 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.197417 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.198059 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 30 21:17:12 crc 
kubenswrapper[4721]: I0130 21:17:12.199263 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.199962 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.201057 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.201214 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.201603 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.201975 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.202538 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214181 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214251 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214236 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214324 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214341 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214354 4721 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214366 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214378 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214390 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214403 4721 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214414 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214427 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214440 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" 
(UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214451 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214464 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214476 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214488 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214636 4721 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214682 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214711 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.214731 4721 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215683 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215698 4721 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215709 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215721 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215733 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215745 4721 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215757 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215768 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215779 4721 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215791 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215803 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215815 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215827 4721 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215838 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215849 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215861 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215874 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215884 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215896 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215907 4721 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215918 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215929 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215941 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215952 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215963 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215975 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.215989 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216001 4721 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216014 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216026 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216039 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216052 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216064 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216076 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216088 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216099 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216111 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216123 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216133 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216146 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216158 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216170 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216181 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216191 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216203 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216214 4721 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216226 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216238 4721 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216250 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216261 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216271 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216282 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216312 4721 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216323 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216333 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216343 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216356 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216366 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216377 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216387 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216397 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216408 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216418 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216429 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216440 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216450 4721 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216462 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216474 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216485 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: 
\"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216496 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216507 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216518 4721 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216529 4721 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216540 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216551 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216563 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216574 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216586 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216598 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216610 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216620 4721 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216643 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216655 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216694 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216707 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216718 4721 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216730 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216746 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216757 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216768 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216779 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216790 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216801 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216850 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216862 4721 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216873 4721 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216883 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216987 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.216999 4721 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217074 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217087 4721 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217100 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217111 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217125 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217135 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217147 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217158 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217169 4721 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217180 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217191 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217202 4721 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217214 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217225 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217238 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217249 4721 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217259 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217278 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217289 4721 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217319 4721 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217335 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217349 4721 reconciler_common.go:293] "Volume detached for 
volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217362 4721 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217373 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217384 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217395 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217406 4721 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217416 4721 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217427 4721 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217437 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217452 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217461 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217473 4721 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217485 4721 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217496 4721 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217507 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217519 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217529 4721 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217540 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.217550 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.227973 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.262963 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.285170 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.320902 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.324982 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.331748 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 21:17:12 crc kubenswrapper[4721]: W0130 21:17:12.343214 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-f85af1dd268e17fe3d3b94f3cc78661ad4df00bb02ee8873d077af1934089183 WatchSource:0}: Error finding container f85af1dd268e17fe3d3b94f3cc78661ad4df00bb02ee8873d077af1934089183: Status 404 returned error can't find the container with id f85af1dd268e17fe3d3b94f3cc78661ad4df00bb02ee8873d077af1934089183 Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.520566 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.521018 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.526715 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.533735 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.536216 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.547656 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.560289 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.573711 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.587504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.600802 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.614755 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 
21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.620381 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.620490 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:17:13.62045295 +0000 UTC m=+22.412354196 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.620535 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.620579 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.620619 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.620661 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:13.620649947 +0000 UTC m=+22.412551193 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.620680 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.620728 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:13.620720809 +0000 UTC m=+22.412622055 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.628864 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.644259 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.655555 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.667781 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.680433 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.690822 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.721391 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.721472 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721636 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721677 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721693 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721717 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721740 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721751 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721763 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:13.721743219 +0000 UTC m=+22.513644485 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: E0130 21:17:12.721801 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:13.721786881 +0000 UTC m=+22.513688127 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.831750 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-30 21:12:11 +0000 UTC, rotation deadline is 2026-12-09 04:10:34.307407388 +0000 UTC Jan 30 21:17:12 crc kubenswrapper[4721]: I0130 21:17:12.831811 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7494h53m21.47559854s for next certificate rotation Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.016499 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-jlpj8"] Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.017186 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.020060 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.020097 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.020287 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.025367 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 13:41:36.195692126 +0000 UTC Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.025610 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwbg5\" (UniqueName: \"kubernetes.io/projected/1c455fa6-c3c0-4d87-871a-e038553054c0-kube-api-access-hwbg5\") pod \"node-resolver-jlpj8\" (UID: \"1c455fa6-c3c0-4d87-871a-e038553054c0\") " pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.025647 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1c455fa6-c3c0-4d87-871a-e038553054c0-hosts-file\") pod \"node-resolver-jlpj8\" (UID: \"1c455fa6-c3c0-4d87-871a-e038553054c0\") " pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.032436 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.046098 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.054858 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.067474 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 
21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.078782 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.091400 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.101975 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.127375 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwbg5\" (UniqueName: \"kubernetes.io/projected/1c455fa6-c3c0-4d87-871a-e038553054c0-kube-api-access-hwbg5\") pod \"node-resolver-jlpj8\" (UID: \"1c455fa6-c3c0-4d87-871a-e038553054c0\") " pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.127432 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1c455fa6-c3c0-4d87-871a-e038553054c0-hosts-file\") pod \"node-resolver-jlpj8\" (UID: \"1c455fa6-c3c0-4d87-871a-e038553054c0\") " pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.127351 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.127495 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1c455fa6-c3c0-4d87-871a-e038553054c0-hosts-file\") pod \"node-resolver-jlpj8\" (UID: \"1c455fa6-c3c0-4d87-871a-e038553054c0\") " pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.148417 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwbg5\" (UniqueName: \"kubernetes.io/projected/1c455fa6-c3c0-4d87-871a-e038553054c0-kube-api-access-hwbg5\") pod \"node-resolver-jlpj8\" (UID: \"1c455fa6-c3c0-4d87-871a-e038553054c0\") " pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.240686 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f85af1dd268e17fe3d3b94f3cc78661ad4df00bb02ee8873d077af1934089183"} Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.242174 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd"} Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.242220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73"} Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.242232 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4951805c401e643f7102268b5e6b9ff7b357136336aadeab81322ba313da248f"} Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.243632 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b"} Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.243676 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"31fc424df4931846e6240bbb256ca00ecda81a9a90c5415b3dc4e554c60e2932"} Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.274857 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 
21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.288847 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.312790 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.337350 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-jlpj8" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.342136 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: W0130 21:17:13.349227 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c455fa6_c3c0_4d87_871a_e038553054c0.slice/crio-4acf698c47e3cf5f0778514cd215478dad24ae64e3946a10398c1354d2970319 WatchSource:0}: Error finding container 4acf698c47e3cf5f0778514cd215478dad24ae64e3946a10398c1354d2970319: Status 404 returned error can't find the container with id 4acf698c47e3cf5f0778514cd215478dad24ae64e3946a10398c1354d2970319 Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.371669 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.388487 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.406810 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.423030 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.436653 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.456638 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.469110 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.488476 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 
21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.501430 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.517981 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.530201 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.540443 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"message\\\":\\\"containers with unready 
status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:13Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.631131 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.631226 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.631290 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.631357 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:17:15.631331292 +0000 UTC m=+24.423232538 (durationBeforeRetry 2s). 
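The four status-patch failures above share a single root cause: every PATCH from the kubelet is intercepted by the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-30. Below is a minimal Go triage sketch of the same validity check crypto/x509 applies during the handshake; the endpoint is taken from the messages above, and InsecureSkipVerify is used only so the handshake gets far enough to read the peer certificate:

    // certcheck.go: print the validity window of the webhook's serving cert.
    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Endpoint taken from the log lines above.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // triage only: fetch the cert, skip chain verification
        })
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        now := time.Now()
        fmt.Printf("subject:   %s\n", cert.Subject)
        fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
        fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
        if now.After(cert.NotAfter) {
            // The exact condition behind "certificate has expired or is not yet valid".
            fmt.Printf("EXPIRED: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
        }
    }

Until that certificate is rotated, every webhook-gated write from this node can be expected to fail with the same x509 error, which is why the identical message repeats below with only the timestamp advancing.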
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.631421 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.631524 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:15.631472957 +0000 UTC m=+24.423374393 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.631519 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.631935 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:15.631919931 +0000 UTC m=+24.423821177 (durationBeforeRetry 2s). 
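Each of the volume failures above is rescheduled rather than retried inline: the operation executor records the failure and refuses retries until a deadline derived from an exponentially growing delay, surfaced here as "durationBeforeRetry 2s". A self-contained sketch of that shape follows; the 500ms initial delay and the roughly two-minute cap are illustrative assumptions, not values read from this log:

    // backoff.go: doubling retry delay with a cap, the pattern behind
    // "No retries permitted until ... (durationBeforeRetry 2s)".
    package main

    import (
        "fmt"
        "time"
    )

    type backoff struct {
        wait time.Duration
    }

    // next records one failure and returns the earliest permitted retry time.
    func (b *backoff) next(now time.Time) time.Time {
        const initial = 500 * time.Millisecond        // assumed initial delay
        const ceiling = 2*time.Minute + 2*time.Second // assumed cap
        if b.wait == 0 {
            b.wait = initial
        } else if b.wait < ceiling {
            b.wait *= 2
            if b.wait > ceiling {
                b.wait = ceiling
            }
        }
        return now.Add(b.wait)
    }

    func main() {
        var b backoff
        now := time.Now()
        for i := 0; i < 6; i++ {
            deadline := b.next(now)
            fmt.Printf("failure %d: no retries permitted until %s (durationBeforeRetry %s)\n",
                i+1, deadline.Format("15:04:05.000"), b.wait)
            now = deadline
        }
    }

The Error: line that follows each notice carries the underlying cause; for the PVC unmount above it is that the CSI driver name kubevirt.io.hostpath-provisioner does not yet appear in the kubelet's list of registered CSI drivers, so the operation cannot even reach the plugin.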
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.732151 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:13 crc kubenswrapper[4721]: I0130 21:17:13.732215 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732335 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732349 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732360 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732383 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732418 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732431 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732405 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:15.732392414 +0000 UTC m=+24.524293660 (durationBeforeRetry 2s). 
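The object "..." not registered errors above are local to the kubelet, not API-server rejections: configmap and secret volume sources are resolved through the kubelet's per-pod object cache, and a pod's sources only become resolvable once the pod is registered and the corresponding watch has synced (the "Caches populated for *v1.ConfigMap/*v1.Secret" lines further down mark exactly that transition for other namespaces). A minimal sketch of the lookup pattern, with hypothetical types standing in for the kubelet's cache manager:

    // objectcache.go: lookups fail with "not registered" until a watch
    // populates the entry; the mount is then retried under backoff.
    package main

    import (
        "fmt"
        "sync"
    )

    type objectCache struct {
        mu    sync.RWMutex
        items map[string][]byte // key: "namespace/name"
    }

    func newObjectCache() *objectCache {
        return &objectCache{items: make(map[string][]byte)}
    }

    func (c *objectCache) register(ns, name string, data []byte) {
        c.mu.Lock()
        defer c.mu.Unlock()
        c.items[ns+"/"+name] = data
    }

    func (c *objectCache) get(ns, name string) ([]byte, error) {
        c.mu.RLock()
        defer c.mu.RUnlock()
        data, ok := c.items[ns+"/"+name]
        if !ok {
            return nil, fmt.Errorf("object %q/%q not registered", ns, name)
        }
        return data, nil
    }

    func main() {
        c := newObjectCache()
        // Names taken from the configmap.go/secret.go errors above.
        if _, err := c.get("openshift-network-console", "networking-console-plugin"); err != nil {
            fmt.Println("mount fails:", err)
        }
        c.register("openshift-network-console", "networking-console-plugin", []byte("nginx.conf: ..."))
        if data, err := c.get("openshift-network-console", "networking-console-plugin"); err == nil {
            fmt.Printf("mount can proceed with %d bytes\n", len(data))
        }
    }

Once registration happens, the pending MountVolume.SetUp operations can succeed on their next backoff-scheduled attempt.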
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:13 crc kubenswrapper[4721]: E0130 21:17:13.732485 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:15.732467476 +0000 UTC m=+24.524368722 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.026056 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 13:37:49.705470463 +0000 UTC Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.091670 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.091717 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.091891 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:14 crc kubenswrapper[4721]: E0130 21:17:14.091912 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:14 crc kubenswrapper[4721]: E0130 21:17:14.092020 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:14 crc kubenswrapper[4721]: E0130 21:17:14.092083 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
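The three "Error syncing pod" entries just above are yet another readiness gate: the runtime reports NetworkReady=false because no CNI configuration file exists under /etc/kubernetes/cni/net.d/, so sandbox creation for those pods is skipped outright rather than attempted and failed. A hedged sketch of that directory probe; the path comes from the message, while the glob patterns are an assumption about which file types count:

    // cniready.go: report whether any CNI config is present in the conf dir.
    package main

    import (
        "fmt"
        "path/filepath"
    )

    func cniReady(confDir string) bool {
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
            // Glob only errors on bad patterns, which these are not.
            if matches, _ := filepath.Glob(filepath.Join(confDir, pat)); len(matches) > 0 {
                return true
            }
        }
        return false
    }

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // path taken from the log above
        fmt.Printf("NetworkReady=%v for %s\n", cniReady(dir), dir)
    }

The multus pods whose SyncLoop ADD events appear a few entries below are what eventually write that configuration; once a file lands in the directory, the skipped pods get sandboxes on a later sync.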
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.095003 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.095541 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.096856 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.097501 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.098451 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.098933 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.099532 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.100464 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.100957 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.101885 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.102263 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.102856 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.103689 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.104239 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" 
path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.104663 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.105705 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.106097 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.107150 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.107621 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.108617 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.109058 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.109919 4721 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.110022 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.111812 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.112661 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.247336 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jlpj8" event={"ID":"1c455fa6-c3c0-4d87-871a-e038553054c0","Type":"ContainerStarted","Data":"e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386"} Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.247395 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jlpj8" event={"ID":"1c455fa6-c3c0-4d87-871a-e038553054c0","Type":"ContainerStarted","Data":"4acf698c47e3cf5f0778514cd215478dad24ae64e3946a10398c1354d2970319"} Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 
21:17:14.267818 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.283290 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.297371 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.308277 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.325562 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.350629 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.373045 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.392677 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 
21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.635484 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-g7fgc"] Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.635925 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.637067 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-5gtkk"] Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.637863 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.638144 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-p24tc"] Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.638637 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.640166 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.640341 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.640906 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2p5n5"] Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.641883 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.641992 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.642140 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.642511 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.642656 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.642807 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.646410 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.647079 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.647990 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.648087 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.648326 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.648342 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.648546 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.648746 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.648756 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.649289 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.649351 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.649653 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.654898 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.671940 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.683191 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.696804 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.712899 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.730314 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741494 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-multus-certs\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741554 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-bin\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741581 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-netd\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741603 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741628 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9mr8\" (UniqueName: \"kubernetes.io/projected/f295c622-6366-498b-b846-24316b3ad5b7-kube-api-access-q9mr8\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741666 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-ovn-kubernetes\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741689 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-k8s-cni-cncf-io\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741710 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-daemon-config\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741731 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741753 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-cni-bin\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 
21:17:14.741775 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/83a76f34-15d7-45c2-874e-d44709adbd11-rootfs\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741797 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-script-lib\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741819 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-os-release\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741841 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2b454df8-f3d9-4751-a678-923066b3d3e8-cni-binary-copy\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741880 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-os-release\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741927 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-var-lib-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741951 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-log-socket\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741975 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f295c622-6366-498b-b846-24316b3ad5b7-ovn-node-metrics-cert\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.741998 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-socket-dir-parent\") pod \"multus-g7fgc\" (UID: 
\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742018 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-kubelet\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-hostroot\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742071 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-systemd\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742093 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-system-cni-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742112 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-cnibin\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742133 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztrr2\" (UniqueName: \"kubernetes.io/projected/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-kube-api-access-ztrr2\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742154 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742187 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-ovn\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742208 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-config\") pod \"ovnkube-node-2p5n5\" (UID: 
\"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742231 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/83a76f34-15d7-45c2-874e-d44709adbd11-proxy-tls\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742255 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/83a76f34-15d7-45c2-874e-d44709adbd11-mcd-auth-proxy-config\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742275 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-netns\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742316 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-cnibin\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742475 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq7rt\" (UniqueName: \"kubernetes.io/projected/83a76f34-15d7-45c2-874e-d44709adbd11-kube-api-access-zq7rt\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742510 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-systemd-units\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742611 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-env-overrides\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742690 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-kubelet\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742717 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-slash\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742852 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-etc-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742923 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-conf-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742956 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-system-cni-dir\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.742980 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-cni-binary-copy\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743023 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2b454df8-f3d9-4751-a678-923066b3d3e8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-netns\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743230 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-node-log\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743260 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-cni-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743288 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7s76\" (UniqueName: \"kubernetes.io/projected/2b454df8-f3d9-4751-a678-923066b3d3e8-kube-api-access-q7s76\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743343 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-cni-multus\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743371 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-etc-kubernetes\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.743588 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T
21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.757217 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.768271 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.783020 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 
21:17:14.797112 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.813396 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.825398 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.837356 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844437 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-slash\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844494 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-etc-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-slash\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844516 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-conf-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844537 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-system-cni-dir\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844556 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-kubelet\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-conf-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844579 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-netns\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844594 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-kubelet\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844598 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-node-log\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844616 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-cni-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844610 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-etc-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844636 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-cni-binary-copy\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844697 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2b454df8-f3d9-4751-a678-923066b3d3e8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7s76\" (UniqueName: \"kubernetes.io/projected/2b454df8-f3d9-4751-a678-923066b3d3e8-kube-api-access-q7s76\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844742 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-cni-multus\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844748 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-cni-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844763 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-etc-kubernetes\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844798 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-multus-certs\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-node-log\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844847 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-multus-certs\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844821 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-etc-kubernetes\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844827 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-bin\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844797 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-netns\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844904 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-netd\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844848 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-bin\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844929 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844949 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-system-cni-dir\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-cni-multus\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844987 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844957 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9mr8\" (UniqueName: \"kubernetes.io/projected/f295c622-6366-498b-b846-24316b3ad5b7-kube-api-access-q9mr8\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845079 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-ovn-kubernetes\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.844994 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-netd\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845102 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-k8s-cni-cncf-io\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845128 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-k8s-cni-cncf-io\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845167 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-ovn-kubernetes\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845170 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-daemon-config\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845231 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-cni-bin\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845276 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/83a76f34-15d7-45c2-874e-d44709adbd11-rootfs\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845307 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-script-lib\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845323 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845371 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-cni-bin\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845380 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-os-release\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845402 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2b454df8-f3d9-4751-a678-923066b3d3e8-cni-binary-copy\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845441 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-var-lib-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-log-socket\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845481 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f295c622-6366-498b-b846-24316b3ad5b7-ovn-node-metrics-cert\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845500 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-os-release\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845522 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-socket-dir-parent\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845545 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-systemd\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845566 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-system-cni-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-cnibin\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845600 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-kubelet\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845620 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-hostroot\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845650 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztrr2\" (UniqueName: \"kubernetes.io/projected/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-kube-api-access-ztrr2\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845402 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/83a76f34-15d7-45c2-874e-d44709adbd11-rootfs\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845680 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845710 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-ovn\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845728 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-config\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845748 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/83a76f34-15d7-45c2-874e-d44709adbd11-proxy-tls\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845764 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/83a76f34-15d7-45c2-874e-d44709adbd11-mcd-auth-proxy-config\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845770 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2b454df8-f3d9-4751-a678-923066b3d3e8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845781 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-netns\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845803 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-cnibin\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845827 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq7rt\" (UniqueName: \"kubernetes.io/projected/83a76f34-15d7-45c2-874e-d44709adbd11-kube-api-access-zq7rt\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845849 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-systemd-units\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845868 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-env-overrides\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845871 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-cni-binary-copy\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.845946 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-os-release\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-daemon-config\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846053 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-system-cni-dir\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846089 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-hostroot\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846097 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-cnibin\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846141 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-os-release\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846179 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-multus-socket-dir-parent\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846205 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-systemd\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846055 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-var-lib-kubelet\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846212 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-script-lib\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846242 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-var-lib-openvswitch\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846264 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-log-socket\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846342 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2b454df8-f3d9-4751-a678-923066b3d3e8-cni-binary-copy\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846361 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-ovn\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846378 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-systemd-units\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846386 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-host-run-netns\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846408 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-cnibin\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846513 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-env-overrides\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846696 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2b454df8-f3d9-4751-a678-923066b3d3e8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.846912 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/83a76f34-15d7-45c2-874e-d44709adbd11-mcd-auth-proxy-config\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.847059 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-config\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.859859 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f295c622-6366-498b-b846-24316b3ad5b7-ovn-node-metrics-cert\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.859963 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/83a76f34-15d7-45c2-874e-d44709adbd11-proxy-tls\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.867813 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.867948 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9mr8\" (UniqueName: \"kubernetes.io/projected/f295c622-6366-498b-b846-24316b3ad5b7-kube-api-access-q9mr8\") pod \"ovnkube-node-2p5n5\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.867948 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztrr2\" (UniqueName: \"kubernetes.io/projected/62d4c2ec-791a-4f32-8ba0-118cac4e72e5-kube-api-access-ztrr2\") pod \"multus-g7fgc\" (UID: \"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\") " pod="openshift-multus/multus-g7fgc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.867963 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7s76\" (UniqueName: \"kubernetes.io/projected/2b454df8-f3d9-4751-a678-923066b3d3e8-kube-api-access-q7s76\") pod \"multus-additional-cni-plugins-5gtkk\" (UID: \"2b454df8-f3d9-4751-a678-923066b3d3e8\") " pod="openshift-multus/multus-additional-cni-plugins-5gtkk"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.872560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq7rt\" (UniqueName: \"kubernetes.io/projected/83a76f34-15d7-45c2-874e-d44709adbd11-kube-api-access-zq7rt\") pod \"machine-config-daemon-p24tc\" (UID: \"83a76f34-15d7-45c2-874e-d44709adbd11\") " pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.883519 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.895376 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.910372 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.924765 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.939071 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.948498 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-g7fgc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.957646 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" Jan 30 21:17:14 crc kubenswrapper[4721]: W0130 21:17:14.961910 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62d4c2ec_791a_4f32_8ba0_118cac4e72e5.slice/crio-29d53dfe282e40fc7845a667e8d8a38799a1658bff9976eb16e799871b63a1fc WatchSource:0}: Error finding container 29d53dfe282e40fc7845a667e8d8a38799a1658bff9976eb16e799871b63a1fc: Status 404 returned error can't find the container with id 29d53dfe282e40fc7845a667e8d8a38799a1658bff9976eb16e799871b63a1fc Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.963626 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.964073 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:14Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:14 crc kubenswrapper[4721]: I0130 21:17:14.971592 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:14 crc kubenswrapper[4721]: W0130 21:17:14.982683 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b454df8_f3d9_4751_a678_923066b3d3e8.slice/crio-94a3e613f2aae7be618e9663bcb9a51524106feae0944c3bc4da71b26203f565 WatchSource:0}: Error finding container 94a3e613f2aae7be618e9663bcb9a51524106feae0944c3bc4da71b26203f565: Status 404 returned error can't find the container with id 94a3e613f2aae7be618e9663bcb9a51524106feae0944c3bc4da71b26203f565 Jan 30 21:17:14 crc kubenswrapper[4721]: W0130 21:17:14.984741 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83a76f34_15d7_45c2_874e_d44709adbd11.slice/crio-20b54a1191bb8bab993f0b23805236ff95e0e2dac5c423bd7fa1965b4d56c43c WatchSource:0}: Error finding container 20b54a1191bb8bab993f0b23805236ff95e0e2dac5c423bd7fa1965b4d56c43c: Status 404 returned error can't find the container with id 20b54a1191bb8bab993f0b23805236ff95e0e2dac5c423bd7fa1965b4d56c43c Jan 30 21:17:15 crc kubenswrapper[4721]: W0130 21:17:15.000401 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf295c622_6366_498b_b846_24316b3ad5b7.slice/crio-ea98a05c679ef185e8c1e8324f7981eff20cc54258aa6fc5116a944a68d12632 WatchSource:0}: Error finding container ea98a05c679ef185e8c1e8324f7981eff20cc54258aa6fc5116a944a68d12632: Status 404 returned error can't find the container with id ea98a05c679ef185e8c1e8324f7981eff20cc54258aa6fc5116a944a68d12632 Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.026744 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 02:57:28.813200352 +0000 UTC Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.252954 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.253028 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.253046 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"20b54a1191bb8bab993f0b23805236ff95e0e2dac5c423bd7fa1965b4d56c43c"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.261223 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerStarted","Data":"99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.261289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" 
event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerStarted","Data":"29d53dfe282e40fc7845a667e8d8a38799a1658bff9976eb16e799871b63a1fc"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.263729 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.265519 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a" exitCode=0 Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.265587 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.265621 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"ea98a05c679ef185e8c1e8324f7981eff20cc54258aa6fc5116a944a68d12632"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.267529 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerStarted","Data":"3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.267604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerStarted","Data":"94a3e613f2aae7be618e9663bcb9a51524106feae0944c3bc4da71b26203f565"} Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.273617 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.289522 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.304131 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.321168 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a
1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.335656 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 
21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.348339 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.365167 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.385236 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.400128 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.420928 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.434411 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/web
hook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.449669 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.462897 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.477542 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.549784 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.570121 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.594357 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.618779 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.651279 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.652172 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.652285 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:17:19.6522664 +0000 UTC m=+28.444167636 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.652392 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.652544 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.652609 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:19.652600761 +0000 UTC m=+28.444502007 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.652924 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.653042 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.653084 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:19.653076746 +0000 UTC m=+28.444977992 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.667679 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.670863 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.681472 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.682133 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.687893 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.700614 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.711750 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.716314 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.721987 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"imag
e\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.735599 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.754497 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 
21:17:15.754684 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.754646 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.754742 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.754753 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.754804 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:19.754791277 +0000 UTC m=+28.546692523 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.754993 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.755040 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.755055 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:15 crc kubenswrapper[4721]: E0130 21:17:15.755142 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:19.755112898 +0000 UTC m=+28.547014144 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.755622 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.772367 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.783260 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.790997 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.802188 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.816170 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.834900 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-o
perator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.849475 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.865333 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.880520 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.899654 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.918099 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.933308 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:15 crc kubenswrapper[4721]: I0130 21:17:15.946703 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:15Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.026864 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 07:18:33.474620843 +0000 UTC Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.092888 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.092991 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.092983 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:16 crc kubenswrapper[4721]: E0130 21:17:16.093274 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:16 crc kubenswrapper[4721]: E0130 21:17:16.093095 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:16 crc kubenswrapper[4721]: E0130 21:17:16.093516 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.272422 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b454df8-f3d9-4751-a678-923066b3d3e8" containerID="3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920" exitCode=0 Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.272539 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerDied","Data":"3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.278540 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.278581 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.278593 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.278602 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.278612 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.278622 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" 
event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5"} Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.299935 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.319733 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.335660 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.364249 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.380882 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.401735 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.417054 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.436419 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.453582 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.466051 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.477543 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.488273 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.500660 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:16 crc kubenswrapper[4721]: I0130 21:17:16.525282 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:16Z 
is after 2025-08-24T17:21:41Z"
Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.027207 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 13:48:45.742686546 +0000 UTC
Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.281907 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerStarted","Data":"0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf"}
Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.296633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.316503 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.329876 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.373986 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.393943 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.415633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\
\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.436912 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.451272 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.463356 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.473976 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.488010 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.500467 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.511346 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.527125 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.841205 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.870993 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.888364 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.901201 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.912895 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.925846 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.942943 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.963626 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:17 crc kubenswrapper[4721]: I0130 21:17:17.985971 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:17Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.007684 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.027363 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 12:43:57.25385524 +0000 UTC Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.033044 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.048132 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.062858 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.077075 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.092139 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.092139 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.092226 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.092323 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.092410 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.092447 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.096427 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.176588 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.180460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.180516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.180536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.181570 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.189788 4721 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.190196 4721 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.191499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.191627 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.191747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.191840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.191978 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.211584 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.215408 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.215438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.215447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.215459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.215468 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.226719 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.231640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.231734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.231789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.231858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.231922 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.250445 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.254537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.254570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.254580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.254594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.254603 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.266981 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.270954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.271000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.271010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.271025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.271034 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.287155 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: E0130 21:17:18.287368 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.288747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.288783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.288797 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.288815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.288826 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.293486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.295646 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b454df8-f3d9-4751-a678-923066b3d3e8" containerID="0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf" exitCode=0 Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.295675 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerDied","Data":"0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.314096 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.339467 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.366652 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.390292 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.391195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.391245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.391259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.391275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.391286 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.400958 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.416057 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.436119 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.464561 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.478432 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7
f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.494168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.494200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.494208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.494221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.494231 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.494669 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.511045 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.524841 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.536544 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.547010 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.599164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.599226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.599240 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.599258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.599275 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.701792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.701848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.701858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.701871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.702517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.794775 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-pmc6z"] Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.795469 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.803681 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.803868 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.804007 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.804384 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.805805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.805837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.805853 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.805877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.805895 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.815336 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.828026 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.848968 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.865687 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.883849 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.891410 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/498f7a03-bce3-4988-b1c7-67fb7feb7700-host\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.891473 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8mpn\" (UniqueName: \"kubernetes.io/projected/498f7a03-bce3-4988-b1c7-67fb7feb7700-kube-api-access-k8mpn\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.891639 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/498f7a03-bce3-4988-b1c7-67fb7feb7700-serviceca\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.902982 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.908309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.908334 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.908344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.908360 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.908370 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:18Z","lastTransitionTime":"2026-01-30T21:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.923595 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled
\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.954092 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.977340 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.993123 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/498f7a03-bce3-4988-b1c7-67fb7feb7700-serviceca\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.993189 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/498f7a03-bce3-4988-b1c7-67fb7feb7700-host\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.993215 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-k8mpn\" (UniqueName: \"kubernetes.io/projected/498f7a03-bce3-4988-b1c7-67fb7feb7700-kube-api-access-k8mpn\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.993407 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/498f7a03-bce3-4988-b1c7-67fb7feb7700-host\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.994553 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/498f7a03-bce3-4988-b1c7-67fb7feb7700-serviceca\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:18 crc kubenswrapper[4721]: I0130 21:17:18.997280 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:18Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.010767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.010811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.010826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.010846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.010859 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.016264 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.022213 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8mpn\" (UniqueName: \"kubernetes.io/projected/498f7a03-bce3-4988-b1c7-67fb7feb7700-kube-api-access-k8mpn\") pod \"node-ca-pmc6z\" (UID: \"498f7a03-bce3-4988-b1c7-67fb7feb7700\") " pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.027830 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 17:39:47.556289117 +0000 UTC Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.035748 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.051496 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.071522 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.085444 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.114463 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.114501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.114514 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.114529 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.114542 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.129008 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-pmc6z" Jan 30 21:17:19 crc kubenswrapper[4721]: W0130 21:17:19.212253 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod498f7a03_bce3_4988_b1c7_67fb7feb7700.slice/crio-b3276688737aa2f04519ea7f3dea437b8718c7d96075cd0787d43ac75ae86a4f WatchSource:0}: Error finding container b3276688737aa2f04519ea7f3dea437b8718c7d96075cd0787d43ac75ae86a4f: Status 404 returned error can't find the container with id b3276688737aa2f04519ea7f3dea437b8718c7d96075cd0787d43ac75ae86a4f Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.218586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.218632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.218651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.218677 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.218696 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.306153 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b454df8-f3d9-4751-a678-923066b3d3e8" containerID="853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69" exitCode=0 Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.306248 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerDied","Data":"853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.309190 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-pmc6z" event={"ID":"498f7a03-bce3-4988-b1c7-67fb7feb7700","Type":"ContainerStarted","Data":"b3276688737aa2f04519ea7f3dea437b8718c7d96075cd0787d43ac75ae86a4f"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.321238 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.323537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.323581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.323598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.323621 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.323641 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.340054 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.375426 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.397535 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.416085 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.427044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.427096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.427114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.427137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.427153 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.434866 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.457716 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.490814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.512025 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.530999 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.535604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.535657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.535673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.535700 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.535717 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.553074 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.572706 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.593388 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.615703 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.635367 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.638965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.639008 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.639021 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.639042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.639057 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.713154 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.713278 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.713379 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.713470 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.713520 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:27.713503764 +0000 UTC m=+36.505405030 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.713588 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:17:27.713578427 +0000 UTC m=+36.505479683 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.713658 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.713686 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:27.71367776 +0000 UTC m=+36.505579026 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.742249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.742499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.742576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.742658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.742732 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.814996 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815048 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815069 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815152 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:27.815129184 +0000 UTC m=+36.607030470 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.815462 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.815578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815758 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815782 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815797 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:19 crc kubenswrapper[4721]: E0130 21:17:19.815858 4721 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:27.815841977 +0000 UTC m=+36.607743233 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.846453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.846522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.846540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.846571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.846592 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.949376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.949428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.949445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.949468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:19 crc kubenswrapper[4721]: I0130 21:17:19.949484 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:19Z","lastTransitionTime":"2026-01-30T21:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.028916 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 07:26:25.191782828 +0000 UTC Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.052946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.053061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.053083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.053121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.053147 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.091956 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.092166 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:20 crc kubenswrapper[4721]: E0130 21:17:20.092390 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.092539 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:20 crc kubenswrapper[4721]: E0130 21:17:20.092816 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:20 crc kubenswrapper[4721]: E0130 21:17:20.092966 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.157105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.157169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.157186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.157212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.157231 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.260702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.260768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.260789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.260820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.260842 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.317971 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b454df8-f3d9-4751-a678-923066b3d3e8" containerID="3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b" exitCode=0 Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.318082 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerDied","Data":"3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.321143 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-pmc6z" event={"ID":"498f7a03-bce3-4988-b1c7-67fb7feb7700","Type":"ContainerStarted","Data":"2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.351998 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.364395 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.364449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.364467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.364493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.364511 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.375572 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.398190 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.422581 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.441754 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.467573 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.468332 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.468394 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.468411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.468438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.468453 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.490248 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.523626 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.549771 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.564265 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.572391 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.572461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.572477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.572504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.572520 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.590164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.613279 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.633007 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.647766 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.667096 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.675313 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.675353 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.675366 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.675381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.675394 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.681666 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.696181 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.711635 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.726273 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.754328 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.773059 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7
f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.782112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.782143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.782152 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.782166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.782175 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.785878 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.798899 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.813764 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.824587 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.838561 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.854846 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.865139 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.884073 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.884920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.884948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.884958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.884974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.884985 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.914342 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:20Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.987109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.987172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.987192 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.987216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:20 crc kubenswrapper[4721]: I0130 21:17:20.987236 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:20Z","lastTransitionTime":"2026-01-30T21:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.030337 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 21:43:44.126356902 +0000 UTC Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.090275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.090360 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.090383 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.090412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.090442 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.193037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.193088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.193107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.193131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.193148 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.296461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.296521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.296539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.296562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.296580 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.400065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.400112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.400124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.400141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.400152 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.502524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.502570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.502579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.502594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.502604 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.604757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.604801 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.604817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.604835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.604845 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.708023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.708102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.708128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.708155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.708173 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.781968 4721 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.815330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.815399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.815423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.815457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.815482 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.918578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.918653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.918679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.918714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:21 crc kubenswrapper[4721]: I0130 21:17:21.918740 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:21Z","lastTransitionTime":"2026-01-30T21:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.022247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.022328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.022355 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.022383 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.022401 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.030607 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 15:15:21.526337836 +0000 UTC Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.091917 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.092033 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.092040 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:22 crc kubenswrapper[4721]: E0130 21:17:22.092531 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:22 crc kubenswrapper[4721]: E0130 21:17:22.092669 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:22 crc kubenswrapper[4721]: E0130 21:17:22.092891 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.116590 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.125464 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.125534 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.125552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.125581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.125600 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.142641 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.166582 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.180387 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.211679 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.229319 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.229431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.229482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.229508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.229525 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.240824 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.258802 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.273378 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.291764 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.315850 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z 
is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.331635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.331671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.331683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.331701 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.331713 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.332833 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.333134 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.334966 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.336692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerStarted","Data":"05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.355922 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.374867 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.378039 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.396467 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.411133 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.428012 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.433976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.434039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.434063 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.434095 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.434122 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.448623 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.468864 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.482123 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.504005 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.528342 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.536982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.537048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.537069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.537094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.537113 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.545757 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.563611 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.580898 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.600136 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.624078 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.640677 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.640746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.640764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc 
kubenswrapper[4721]: I0130 21:17:22.640790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.640810 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.653209 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiser
ver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.671009 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.688701 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.709098 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.743871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.743973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.743994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.744024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.744044 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.847814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.847884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.847900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.847927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.847948 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.952767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.952835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.952858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.952888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:22 crc kubenswrapper[4721]: I0130 21:17:22.952906 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:22Z","lastTransitionTime":"2026-01-30T21:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.030950 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 04:40:02.668300508 +0000 UTC Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.055827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.055985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.056010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.056040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.056060 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.160459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.160511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.160530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.160557 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.160612 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.264545 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.264652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.264673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.264706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.264729 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.340332 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.341283 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.368035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.368109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.368132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.368165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.368185 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.377463 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.393633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.413654 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.455942 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.471266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.471377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.471401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.471438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.471461 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.481484 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.502397 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.521280 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.547502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.572100 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.574885 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.574935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.574952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.574976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.574994 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.594840 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.616515 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.635450 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.668711 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\
\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 
21:17:23.678481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.678535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.678559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.678590 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.678611 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.694114 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.715221 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.735610 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:23Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.782472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.782561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.782590 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.782630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.782657 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.886417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.886499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.886527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.886569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.886600 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.989846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.989927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.989944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.989973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:23 crc kubenswrapper[4721]: I0130 21:17:23.989993 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:23Z","lastTransitionTime":"2026-01-30T21:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.031583 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 12:17:23.487012517 +0000 UTC Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.091460 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.091460 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:24 crc kubenswrapper[4721]: E0130 21:17:24.091738 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:24 crc kubenswrapper[4721]: E0130 21:17:24.091844 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.092620 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:24 crc kubenswrapper[4721]: E0130 21:17:24.092961 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.093960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.094036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.094065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.094120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.094148 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.198598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.198660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.198673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.198694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.198707 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.301957 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.302029 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.302047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.302073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.302092 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.349673 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b454df8-f3d9-4751-a678-923066b3d3e8" containerID="05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f" exitCode=0 Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.349868 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerDied","Data":"05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.350219 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.373870 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe
1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.398092 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.406776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.406849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.406872 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.406901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.406920 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.422332 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.443496 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.462369 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.477905 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.497481 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.510503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.510595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.510616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.510646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.510667 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.520440 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.542370 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.563932 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.586827 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.606807 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.614592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.614648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.614660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.614687 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.614702 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.637784 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.659568 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.675218 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:24Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.718122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.718194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.718207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.718228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.718242 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.822418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.822468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.822481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.822500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.822512 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.925969 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.926046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.926072 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.926101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:24 crc kubenswrapper[4721]: I0130 21:17:24.926130 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:24Z","lastTransitionTime":"2026-01-30T21:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.034025 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:17:41.891252883 +0000 UTC Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.034820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.034914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.034943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.034980 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.035016 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.139449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.139527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.139554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.139587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.139609 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.243640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.243710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.243730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.243758 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.243775 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.347212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.347269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.347287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.347349 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.347370 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.361217 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b454df8-f3d9-4751-a678-923066b3d3e8" containerID="5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8" exitCode=0 Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.361368 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerDied","Data":"5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.361544 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.387175 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.425754 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2da
ed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\
",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.448420 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.449732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.449816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.449834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.449850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.449863 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.460672 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.471440 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.483275 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.492613 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.507993 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\
\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 
21:17:25.521124 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.533784 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.545104 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.552477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.552507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.552516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.552530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.552540 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.558397 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.572699 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.583397 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.593064 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:25Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.654874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.654911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.654920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.654932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.654941 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.757292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.757361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.757370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.757385 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.757394 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.859938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.859987 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.860001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.860017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.860029 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.963126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.963192 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.963213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.963236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:25 crc kubenswrapper[4721]: I0130 21:17:25.963254 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:25Z","lastTransitionTime":"2026-01-30T21:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.035025 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 22:25:51.586522027 +0000 UTC Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.066561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.066626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.066647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.066678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.066699 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.091906 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.092004 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.091906 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:26 crc kubenswrapper[4721]: E0130 21:17:26.092128 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:26 crc kubenswrapper[4721]: E0130 21:17:26.092237 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:26 crc kubenswrapper[4721]: E0130 21:17:26.092430 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.169416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.169480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.169497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.169523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.169540 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.272361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.272422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.272442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.272468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.272487 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.375568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.375614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.375631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.375653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.375671 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.377181 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" event={"ID":"2b454df8-f3d9-4751-a678-923066b3d3e8","Type":"ContainerStarted","Data":"2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.399515 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.421591 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volum
eMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.440754 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990b
c63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.465759 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.478726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.478787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.478804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.478827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.478847 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.487854 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.499132 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.510771 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.510925 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9"] Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.511375 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: W0130 21:17:26.512728 4721 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert": failed to list *v1.Secret: secrets "ovn-control-plane-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Jan 30 21:17:26 crc kubenswrapper[4721]: E0130 21:17:26.512764 4721 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-control-plane-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-control-plane-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.514033 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.522683 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.533537 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.543139 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.552232 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.567837 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.580702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.580736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc 
kubenswrapper[4721]: I0130 21:17:26.580745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.580762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.580775 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.585292 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.596927 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3605450b-d454-45c7-a954-b6eaac208991-env-overrides\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.596968 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3605450b-d454-45c7-a954-b6eaac208991-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.596987 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rcxt\" (UniqueName: \"kubernetes.io/projected/3605450b-d454-45c7-a954-b6eaac208991-kube-api-access-2rcxt\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.597008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3605450b-d454-45c7-a954-b6eaac208991-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.598942 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.608375 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.642453 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.653806 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.666631 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.680688 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.682418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.682468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.682479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.682495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.682505 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.689955 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.697721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3605450b-d454-45c7-a954-b6eaac208991-env-overrides\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.697766 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3605450b-d454-45c7-a954-b6eaac208991-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: 
\"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.697791 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rcxt\" (UniqueName: \"kubernetes.io/projected/3605450b-d454-45c7-a954-b6eaac208991-kube-api-access-2rcxt\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.697821 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3605450b-d454-45c7-a954-b6eaac208991-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.698429 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3605450b-d454-45c7-a954-b6eaac208991-env-overrides\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.698683 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3605450b-d454-45c7-a954-b6eaac208991-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.702876 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.713267 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.716412 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rcxt\" (UniqueName: \"kubernetes.io/projected/3605450b-d454-45c7-a954-b6eaac208991-kube-api-access-2rcxt\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.725359 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.738269 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.754534 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.765344 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.776463 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.785148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.785178 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.785187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.785201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.785209 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.787398 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.801928 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.819895 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990b
c63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.833677 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:26Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.888129 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.888168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.888177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.888191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.888201 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.990809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.990854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.990867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.990884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:26 crc kubenswrapper[4721]: I0130 21:17:26.990897 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:26Z","lastTransitionTime":"2026-01-30T21:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.035995 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 17:46:43.792630905 +0000 UTC Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.093564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.093615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.093637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.093660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.093679 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.196001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.196368 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.196488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.196585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.196718 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.299142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.299189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.299200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.299217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.299232 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.401142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.401198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.401217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.401241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.401259 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.504107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.504149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.504160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.504178 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.504190 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.606660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.606703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.606713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.606728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.606738 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.698522 4721 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-control-plane-metrics-cert: failed to sync secret cache: timed out waiting for the condition Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.698626 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3605450b-d454-45c7-a954-b6eaac208991-ovn-control-plane-metrics-cert podName:3605450b-d454-45c7-a954-b6eaac208991 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:28.198605342 +0000 UTC m=+36.990506598 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-control-plane-metrics-cert" (UniqueName: "kubernetes.io/secret/3605450b-d454-45c7-a954-b6eaac208991-ovn-control-plane-metrics-cert") pod "ovnkube-control-plane-749d76644c-sr2j9" (UID: "3605450b-d454-45c7-a954-b6eaac208991") : failed to sync secret cache: timed out waiting for the condition Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.711501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.711551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.711574 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.711602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.711618 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.809765 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.810006 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:17:43.809964602 +0000 UTC m=+52.601865888 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.810158 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.810230 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.810364 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.810435 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.810459 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:43.810435928 +0000 UTC m=+52.602337424 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.810484 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:43.810471059 +0000 UTC m=+52.602372315 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.814711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.814759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.814776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.814796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.814812 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.911500 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.911560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911654 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911686 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911703 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911761 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:43.911739777 +0000 UTC m=+52.703641033 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911660 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911799 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911815 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.911865 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:43.911851101 +0000 UTC m=+52.703752357 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.917079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.917162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.917186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.917216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.917239 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:27Z","lastTransitionTime":"2026-01-30T21:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.981071 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-bkv95"] Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.981926 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:27 crc kubenswrapper[4721]: E0130 21:17:27.982031 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:27 crc kubenswrapper[4721]: I0130 21:17:27.992323 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.012066 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mou
ntPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.021872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.021912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.021923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.021942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.021954 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.034002 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.036823 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 16:04:33.293801228 +0000 UTC Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.056454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.072336 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.091902 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.091995 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.091994 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.092091 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.092383 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.092498 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.094719 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b14472353
88416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubern
etes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[
{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.113716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq9db\" (UniqueName: \"kubernetes.io/projected/19fca1ba-eb6d-479c-90ff-e55739aed640-kube-api-access-sq9db\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.113879 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.124930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.124995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.125015 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.125044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.125065 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.136775 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.161403 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.178497 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.191919 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.208135 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.215144 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3605450b-d454-45c7-a954-b6eaac208991-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.215195 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq9db\" (UniqueName: \"kubernetes.io/projected/19fca1ba-eb6d-479c-90ff-e55739aed640-kube-api-access-sq9db\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.215248 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.215396 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.215453 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:28.715437397 +0000 UTC m=+37.507338653 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.227014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.227048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.227059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.227074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.227086 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.230143 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3605450b-d454-45c7-a954-b6eaac208991-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-sr2j9\" (UID: \"3605450b-d454-45c7-a954-b6eaac208991\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.240444 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq9db\" (UniqueName: \"kubernetes.io/projected/19fca1ba-eb6d-479c-90ff-e55739aed640-kube-api-access-sq9db\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.242955 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb0
39dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubern
etes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.254336 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.265590 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.278289 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.290749 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.304082 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.314865 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.324193 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.329067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.329112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.329130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.329155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.329173 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: W0130 21:17:28.343162 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3605450b_d454_45c7_a954_b6eaac208991.slice/crio-c253bbddb678bc24aeb6a6d91b329f6bee0c79b98390af2d8bdc4665ecd65de6 WatchSource:0}: Error finding container c253bbddb678bc24aeb6a6d91b329f6bee0c79b98390af2d8bdc4665ecd65de6: Status 404 returned error can't find the container with id c253bbddb678bc24aeb6a6d91b329f6bee0c79b98390af2d8bdc4665ecd65de6 Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.385594 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" event={"ID":"3605450b-d454-45c7-a954-b6eaac208991","Type":"ContainerStarted","Data":"c253bbddb678bc24aeb6a6d91b329f6bee0c79b98390af2d8bdc4665ecd65de6"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.431531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.431577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.431654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.431674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.432232 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.535088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.535137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.535154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.535180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.535196 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.604580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.604635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.604645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.604659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.604669 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.615426 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.619063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.619183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.619272 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.619372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.619496 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.629760 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.632830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.632948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.633037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.633122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.633200 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.645928 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.648896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.648933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.648944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.648960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.648970 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.659116 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.661846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.661908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.661921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.661939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.661950 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.676337 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:28Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.676440 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.678034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.678062 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.678070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.678083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.678093 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.719907 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.720129 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:28 crc kubenswrapper[4721]: E0130 21:17:28.720371 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:29.720336921 +0000 UTC m=+38.512238197 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.781482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.781524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.781547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.781579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.781604 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.884016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.884072 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.884092 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.884118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.884137 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.986783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.986830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.986843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.986863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:28 crc kubenswrapper[4721]: I0130 21:17:28.986876 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:28Z","lastTransitionTime":"2026-01-30T21:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.037175 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 23:01:36.080497615 +0000 UTC Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.089867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.089922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.089938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.089963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.089980 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.193210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.193427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.193587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.193708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.193797 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.296233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.296283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.296291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.296351 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.296360 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.391143 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/0.log" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.395217 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d" exitCode=1 Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.395374 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.396862 4721 scope.go:117] "RemoveContainer" containerID="c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.397531 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" event={"ID":"3605450b-d454-45c7-a954-b6eaac208991","Type":"ContainerStarted","Data":"a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.397587 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" event={"ID":"3605450b-d454-45c7-a954-b6eaac208991","Type":"ContainerStarted","Data":"d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.398948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.399175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.399389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.399581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.399723 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.413479 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.427415 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.458561 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.484985 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.501485 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.504611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.504687 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.504708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.504738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.504759 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.516610 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.539711 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.561160 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.578878 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.600319 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.608043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.608343 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.608610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.608661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.608688 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.623172 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.644129 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.688603 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990b
c63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"253 5975 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632689 5975 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 21:17:27.632841 5975 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632870 5975 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.633893 5975 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 21:17:27.634430 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 21:17:27.634468 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 21:17:27.634520 5975 factory.go:656] Stopping watch factory\\\\nI0130 21:17:27.634517 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 21:17:27.634550 5975 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.706188 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.711747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.711810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.711831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.711861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.711882 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
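Every "Failed to update status for pod" record above ends with the same TLS error: the network-node-identity webhook at 127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) lies months before the node's clock (2026-01-30T21:17:29Z), so the kubelet's client rejects the handshake before any status patch is delivered. A minimal sketch in Go, assuming only the standard library, of the validity-window check that produces this message; the certificate fields below are illustrative stand-ins, not the cluster's actual webhook certificate:

package main

import (
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"time"
)

func main() {
	// Illustrative values mirroring the log: NotAfter 2025-08-24T17:21:41Z,
	// checked at 2026-01-30T21:17:29Z. Not the real certificate.
	notAfter := time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC)
	now := time.Date(2026, 1, 30, 21, 17, 29, 0, time.UTC)

	cert := &x509.Certificate{
		Subject:   pkix.Name{CommonName: "network-node-identity.openshift.io"}, // illustrative
		NotBefore: notAfter.AddDate(-1, 0, 0),
		NotAfter:  notAfter,
	}

	// Certificate verification rejects any time outside [NotBefore, NotAfter]
	// before it even attempts chain building, which is why the Post fails.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("x509: certificate has expired or is not yet valid: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
}

Until the certificate is rotated (or the clock corrected), every status update is retried and rejected identically, which is why the same records recur with later timestamps further down.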
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.725592 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.731402 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:29 crc kubenswrapper[4721]: E0130 21:17:29.731614 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:29 crc kubenswrapper[4721]: E0130 21:17:29.731711 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:31.731680748 +0000 UTC m=+40.523582014 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.747808 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.761538 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.776732 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.795335 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.815059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.815085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.815094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.815110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.815121 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
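The repeated "Node became not ready" records show the other half of the picture: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, so pods such as network-metrics-daemon-bkv95 sit in ContainerCreating while their volume mounts back off ("No retries permitted until ... durationBeforeRetry 2s" above). A minimal sketch, assuming the conventional libcni file extensions and not the runtime's actual implementation, of the directory probe behind that readiness message:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether confDir contains any file a CNI loader
// would pick up; .conf, .conflist, and .json are the extensions libcni
// conventionally accepts (an assumption for this sketch).
func cniConfigPresent(confDir string) bool {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false // unreadable or missing dir counts as "no config"
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	const confDir = "/etc/kubernetes/cni/net.d/"
	if !cniConfigPresent(confDir) {
		fmt.Printf("container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in %s. Has your network provider started?\n", confDir)
	}
}

The check is re-evaluated on every kubelet sync, so the node can only flip back to Ready after the network plugin writes its configuration; here that plugin's own pods are blocked by the expired webhook certificate, which keeps the two failure modes feeding each other.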
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.815709 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.833597 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.867251 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"253 5975 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632689 5975 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 21:17:27.632841 5975 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632870 5975 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.633893 5975 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 21:17:27.634430 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 21:17:27.634468 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 21:17:27.634520 5975 factory.go:656] Stopping watch factory\\\\nI0130 21:17:27.634517 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 21:17:27.634550 5975 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.886936 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.911155 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.917837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.917890 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.917903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.917924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.917943 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:29Z","lastTransitionTime":"2026-01-30T21:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.929995 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.944439 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.958913 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.973948 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.985962 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:29 crc kubenswrapper[4721]: I0130 21:17:29.997512 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:29Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.011961 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.021838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.021874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.021883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.021899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.021911 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.022989 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.038239 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 20:04:08.34779489 +0000 UTC
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.046771 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.071966 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.094026 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:30 crc kubenswrapper[4721]: E0130 21:17:30.094157 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.094392 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:30 crc kubenswrapper[4721]: E0130 21:17:30.094454 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.094513 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:30 crc kubenswrapper[4721]: E0130 21:17:30.094578 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.094714 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:30 crc kubenswrapper[4721]: E0130 21:17:30.094939 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.124724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.124772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.124786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.124803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.124816 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.227884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.227951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.227970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.227998 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.228016 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.332465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.332511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.332527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.332550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.332566 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.403894 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/0.log" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.406346 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.406556 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.427424 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.434715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.434780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.434795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.434816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.434828 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.447278 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.463086 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.476693 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.492992 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.509150 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.538672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.538765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.538785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.538812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.538828 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.553368 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"253 5975 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632689 5975 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 21:17:27.632841 5975 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632870 5975 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.633893 5975 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 21:17:27.634430 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 21:17:27.634468 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 21:17:27.634520 5975 factory.go:656] Stopping watch factory\\\\nI0130 21:17:27.634517 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 21:17:27.634550 5975 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.571639 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.593025 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.619877 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.640350 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.642769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.642814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.642829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.642852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.642868 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.657481 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.695781 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.714877 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.734834 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.746084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.746130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.746143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.746170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.746183 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.752676 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.778182 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.849703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.849757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc 
kubenswrapper[4721]: I0130 21:17:30.849773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.849793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.849808 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.953209 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.953274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.953291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.953413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:30 crc kubenswrapper[4721]: I0130 21:17:30.953431 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:30Z","lastTransitionTime":"2026-01-30T21:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.038961 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 12:21:10.898834624 +0000 UTC Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.057099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.057181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.057201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.057236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.057258 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.161693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.161762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.161786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.161817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.161840 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.264469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.264525 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.264544 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.264569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.264590 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.367265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.367341 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.367358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.367379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.367401 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.413512 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/1.log" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.414547 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/0.log" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.420019 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47" exitCode=1 Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.420048 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.420127 4721 scope.go:117] "RemoveContainer" containerID="c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.421540 4721 scope.go:117] "RemoveContainer" containerID="4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47" Jan 30 21:17:31 crc kubenswrapper[4721]: E0130 21:17:31.421868 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.443637 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.465660 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.471212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.471276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.471294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.471354 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.471369 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.488798 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.508975 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.544952 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"253 5975 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632689 5975 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 21:17:27.632841 5975 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632870 5975 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.633893 5975 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 21:17:27.634430 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 21:17:27.634468 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 21:17:27.634520 5975 factory.go:656] Stopping watch factory\\\\nI0130 21:17:27.634517 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 21:17:27.634550 5975 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:30Z\\\",\\\"message\\\":\\\" \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0130 21:17:30.386652 6238 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}\\\\nI0130 21:17:30.386666 6238 services_controller.go:360] Finished syncing service machine-api-operator-machine-webhook on namespace openshift-machine-api for network=default : 3.006006ms\\\\nI0130 21:17:30.386678 6238 services_controller.go:356] Processing sync for service openshift-kube-apiserver-operator/metrics for network=default\\\\nF0130 21:17:30.386707 6238 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.567817 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.574471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.575596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.575623 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.575658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.575686 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.588977 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.602996 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.618268 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.638679 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.654266 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.667504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.678512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.678573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.678592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.678620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.678638 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.683000 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.697709 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.712966 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.734980 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.751990 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " 
pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:31 crc kubenswrapper[4721]: E0130 21:17:31.752183 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:31 crc kubenswrapper[4721]: E0130 21:17:31.752385 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:35.752358087 +0000 UTC m=+44.544259373 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.767665 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\
":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt
\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:31Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.781622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.781665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.781681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.781704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.781722 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.885824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.885885 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.885948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.885978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.885999 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.989679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.989729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.989745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.989768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:31 crc kubenswrapper[4721]: I0130 21:17:31.989785 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:31Z","lastTransitionTime":"2026-01-30T21:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.039372 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 07:33:51.718061498 +0000 UTC Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.091221 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.091250 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:32 crc kubenswrapper[4721]: E0130 21:17:32.091385 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.091461 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.091280 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:32 crc kubenswrapper[4721]: E0130 21:17:32.091669 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:32 crc kubenswrapper[4721]: E0130 21:17:32.091742 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:32 crc kubenswrapper[4721]: E0130 21:17:32.091816 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.093136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.093178 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.093194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.093216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.093234 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.108705 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.130115 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.143747 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.159391 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.182759 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.195841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.195887 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.195900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.195918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.195930 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.203235 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.221008 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.238181 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.253220 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.282850 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9f5f1184a4fed93cb039dffe6fc941b98f2990bc63d822416c7a01e83db8d3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:28Z\\\",\\\"message\\\":\\\"253 5975 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632689 5975 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 21:17:27.632841 5975 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.632870 5975 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 21:17:27.633893 5975 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 21:17:27.634430 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 21:17:27.634468 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 21:17:27.634520 5975 factory.go:656] Stopping watch factory\\\\nI0130 21:17:27.634517 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 21:17:27.634550 5975 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:30Z\\\",\\\"message\\\":\\\" 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0130 21:17:30.386652 6238 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}\\\\nI0130 21:17:30.386666 6238 services_controller.go:360] Finished syncing service machine-api-operator-machine-webhook on namespace openshift-machine-api for network=default : 3.006006ms\\\\nI0130 21:17:30.386678 6238 services_controller.go:356] Processing sync for service openshift-kube-apiserver-operator/metrics for network=default\\\\nF0130 21:17:30.386707 6238 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.296361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.298090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.298133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.298148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.298174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.298187 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.312519 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.326905 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 
21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.341989 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.361086 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.379076 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.394465 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.401199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.401245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.401258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.401275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.401287 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.424253 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/1.log" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.503936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.504002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.504019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.504045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.504062 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.607420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.607457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.607466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.607480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.607491 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
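The x509 failures repeated above are Go's standard validity-window check: a TLS handshake is rejected whenever the current time falls outside the certificate's [NotBefore, NotAfter] range, which is exactly what "current time 2026-01-30T21:17:32Z is after 2025-08-24T17:21:41Z" reports. A minimal sketch of the same check, assuming a local PEM file (the file path is illustrative, not the webhook's actual mount):

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Illustrative path; the real webhook cert is mounted inside the pod.
        data, err := os.ReadFile("webhook-cert.pem")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now()
        // Same window test the handshake applies before any chain building.
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            fmt.Printf("certificate invalid: current time %s is outside [%s, %s]\n",
                now.UTC().Format(time.RFC3339),
                cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339))
            return
        }
        fmt.Println("certificate within validity window")
    }

Until that webhook certificate is rotated, every status patch routed through pod.network-node-identity.openshift.io will keep failing the same way, which is why the same error string recurs for pod after pod above.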
Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.709069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.709106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.709119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.709137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.709150 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.811797 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.811848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.811861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.811883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.811896 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.914986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.915055 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.915078 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.915106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:32 crc kubenswrapper[4721]: I0130 21:17:32.915126 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:32Z","lastTransitionTime":"2026-01-30T21:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.017897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.017964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.017992 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.018023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.018048 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.039853 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 02:53:34.354026947 +0000 UTC Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.120873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.120923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.120942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.120966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.120984 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
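The certificate_manager.go:356 line above reports a rotation deadline well before the certificate's expiration, and later passes in this log report different deadlines each time, because the deadline is re-drawn with jitter on every check. A sketch of that behavior, assuming client-go's jitter of roughly 70-90% of the certificate lifetime (factor range assumed from k8s.io/client-go/util/certificate) and an illustrative issue date:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // Re-rolled on each call, so consecutive log lines show different deadlines.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
        notBefore := notAfter.Add(-365 * 24 * time.Hour) // assumed issue time
        for i := 0; i < 3; i++ {
            fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).UTC())
        }
    }

The jitter spreads rotation load across a fleet; a deadline in the past simply means the manager will attempt rotation on its next opportunity.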
Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.223471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.223526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.223543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.223569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.223585 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.326906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.326970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.326988 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.327016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.327036 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.429931 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.429987 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.430010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.430077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.430100 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.533283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.533376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.533400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.533428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.533450 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.636054 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.636117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.636135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.636159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.636176 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.738553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.738587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.738597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.738609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.738620 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.841252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.841341 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.841364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.841386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.841404 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.943973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.944007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.944019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.944036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:33 crc kubenswrapper[4721]: I0130 21:17:33.944061 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:33Z","lastTransitionTime":"2026-01-30T21:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.040734 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 16:57:05.616684905 +0000 UTC Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.046704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.046754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.046771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.046791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.046806 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.092434 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.092461 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.092468 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:34 crc kubenswrapper[4721]: E0130 21:17:34.092572 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.092436 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:34 crc kubenswrapper[4721]: E0130 21:17:34.092716 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:34 crc kubenswrapper[4721]: E0130 21:17:34.092853 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:34 crc kubenswrapper[4721]: E0130 21:17:34.092946 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.149277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.149354 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.149367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.149404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.149416 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.251906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.251965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.252020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.252045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.252062 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.355412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.355471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.355488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.355513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.355531 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.458160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.458199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.458209 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.458228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.458237 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.561753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.561823 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.561840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.561868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.561888 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.665677 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.665743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.665761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.665785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.665803 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.768522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.768576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.768593 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.768619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.768689 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.870556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.870586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.870596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.870608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.870617 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.973681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.973723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.973743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.973767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:34 crc kubenswrapper[4721]: I0130 21:17:34.973784 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:34Z","lastTransitionTime":"2026-01-30T21:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.041145 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 13:51:58.034883055 +0000 UTC Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.076696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.076763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.076786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.076813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.076836 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.181378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.181452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.181474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.181506 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.181528 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.284854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.284898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.284908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.284923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.284934 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.388071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.388141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.388167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.388198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.388220 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.490624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.490680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.490692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.490713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.490726 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.593599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.593645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.593656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.593675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.593720 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.696578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.696637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.696654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.696684 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.696708 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.798634 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:35 crc kubenswrapper[4721]: E0130 21:17:35.798820 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:35 crc kubenswrapper[4721]: E0130 21:17:35.798920 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:43.798893303 +0000 UTC m=+52.590794589 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.799823 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.799886 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.799906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.799934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.799956 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
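The metrics-certs mount failure above is parked rather than retried immediately: "No retries permitted until ... (durationBeforeRetry 8s)" is per-volume exponential backoff inside the kubelet's operation executor, and 8s sits partway up a doubling sequence. A minimal sketch of that pattern (base and cap here are assumptions for illustration, not kubelet's exact constants):

    package main

    import (
        "fmt"
        "time"
    )

    // Each failure doubles the wait before the next attempt, up to a cap.
    type backoff struct {
        base, cap, current time.Duration
    }

    func (b *backoff) next() time.Duration {
        if b.current == 0 {
            b.current = b.base
        } else {
            b.current *= 2
            if b.current > b.cap {
                b.current = b.cap
            }
        }
        return b.current
    }

    func main() {
        b := &backoff{base: 500 * time.Millisecond, cap: 2 * time.Minute}
        for i := 0; i < 6; i++ {
            fmt.Println("durationBeforeRetry", b.next()) // 500ms 1s 2s 4s 8s 16s
        }
    }

The underlying cause, object "openshift-multus"/"metrics-daemon-secret" not registered, means the kubelet has not yet synced that Secret into its local store; the backoff only governs how soon it looks again.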
Has your network provider started?"} Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.902609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.902644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.902655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.902669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:35 crc kubenswrapper[4721]: I0130 21:17:35.902678 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:35Z","lastTransitionTime":"2026-01-30T21:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.004792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.004836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.004845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.004861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.004870 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.042268 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 12:21:27.847896782 +0000 UTC Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.091792 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.091874 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.091882 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.091926 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:36 crc kubenswrapper[4721]: E0130 21:17:36.091984 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:36 crc kubenswrapper[4721]: E0130 21:17:36.092092 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:36 crc kubenswrapper[4721]: E0130 21:17:36.092208 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:36 crc kubenswrapper[4721]: E0130 21:17:36.092271 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.117063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.117135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.117154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.117181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.117200 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.220331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.220393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.220412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.220438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.220454 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.323713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.323780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.323798 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.324007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.324027 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.431730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.431779 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.431791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.431807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.431819 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.534935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.535006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.535023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.535048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.535064 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.638603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.638671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.638690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.638719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.638737 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.741798 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.741868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.741893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.741925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.741948 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.844732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.844801 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.844818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.844844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.844858 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.948023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.948108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.948131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.948158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:36 crc kubenswrapper[4721]: I0130 21:17:36.948176 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:36Z","lastTransitionTime":"2026-01-30T21:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.043291 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 06:33:40.787794588 +0000 UTC
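The certificate_manager.go entry just above is worth a second look: the kubelet-serving certificate is valid until 2026-02-24, but the chosen rotation deadline (2026-01-15) is already in the past at the log's clock time of 2026-01-30, so rotation is due immediately; a second later the kubelet logs a different deadline (2026-01-16), because the deadline is re-drawn with fresh jitter on every pass. Below is a sketch of that computation, approximating client-go's rule of picking a random point at roughly 70-90% of the certificate's validity window; the exact constants and the NotBefore value are assumptions here, since the log prints only the expiration:

// deadline.go - approximate sketch of a jittered certificate rotation deadline.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline picks a random instant at 70-90% of the certificate's
// lifetime, mimicking (approximately) client-go's certificate manager.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(total) * frac))
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
	notBefore := notAfter.AddDate(-1, 0, 0)                   // assumed one-year lifetime
	d := nextRotationDeadline(notBefore, notAfter)
	fmt.Println("rotation deadline:", d)
	fmt.Println("rotation due now: ", time.Now().After(d))
}

With any plausible NotBefore, both logged deadlines fall inside that band, which is consistent with the kubelet recomputing the deadline on each check once it has passed.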
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.051462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.051531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.051551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.051576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.051593 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.154348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.154418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.154436 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.154459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.154479 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.257389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.257450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.257467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.257489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.257507 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.360714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.360822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.360839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.360866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.360884 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.462975 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.463054 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.463079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.463105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.463123 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.566374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.566429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.566447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.566465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.566478 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.669603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.669650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.669663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.669681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.669694 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.773073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.773136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.773154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.773182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.773200 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.876007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.876042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.876052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.876068 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.876078 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.978731 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.978764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.978773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.978788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:37 crc kubenswrapper[4721]: I0130 21:17:37.978800 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:37Z","lastTransitionTime":"2026-01-30T21:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.043875 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 20:01:36.990775316 +0000 UTC
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.080943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.081018 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.081041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.081074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.081098 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.091551 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.091621 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.091653 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.091560 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.091750 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640"
Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.091868 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.091973 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.092104 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.184795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.184867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.184889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.184915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.184931 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.287424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.287465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.287474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.287488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.287500 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.390334 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.390378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.390390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.390406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.390417 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.492398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.492454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.492471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.492496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.492514 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.595639 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.595705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.595724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.595748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.595767 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.698721 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.698775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.698792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.698814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.698832 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.802376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.802444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.802467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.802500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.802521 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.897220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.897284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.897345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.897379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.897398 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
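The "Error updating node status" entries that follow explain why none of these condition updates stick: each status patch is gated by the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24, months before the log's clock time of 2026-01-30, so every patch attempt fails TLS verification and is retried. Below is a diagnostic sketch for reading such an endpoint's certificate validity window; verification is skipped deliberately so the expired chain can still be inspected:

// certwindow.go - diagnostic sketch: print the validity window of the
// certificate served at the webhook address from the error entries below.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook address from the log
	// InsecureSkipVerify is intentional here: the goal is to read the
	// expired certificate, not to trust it.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("NotBefore:", cert.NotBefore)
	fmt.Println("NotAfter: ", cert.NotAfter)
	fmt.Println("expired:  ", time.Now().After(cert.NotAfter))
}

Against this node it should report NotAfter 2025-08-24T17:21:41Z, matching the x509 failure in the patch errors below.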
Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.918612 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:38Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.924440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.924510 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.924535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.924560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.924579 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.948395 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:38Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.953445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.953500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.953517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.953539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.953558 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.974053 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:38Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.979049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.979091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.979106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.979125 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:38 crc kubenswrapper[4721]: I0130 21:17:38.979140 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:38Z","lastTransitionTime":"2026-01-30T21:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:38 crc kubenswrapper[4721]: E0130 21:17:38.998495 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:38Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.003521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.003602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.003628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.003660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.003684 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: E0130 21:17:39.023699 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:39Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:39 crc kubenswrapper[4721]: E0130 21:17:39.023947 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.026112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.026157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.026170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.026192 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.026205 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.044334 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 00:53:08.728294415 +0000 UTC Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.128978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.129049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.129107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.129135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.129157 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.232698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.232736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.232746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.232762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.232777 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.335614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.335693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.335713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.335741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.335759 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.438868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.438940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.438958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.438983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.438999 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.540981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.541044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.541062 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.541086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.541102 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.643717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.643778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.643796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.643821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.643841 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.746685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.746743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.746761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.746784 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.746800 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.849788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.849855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.849873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.849896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.849914 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.952945 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.953005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.953022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.953047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:39 crc kubenswrapper[4721]: I0130 21:17:39.953065 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:39Z","lastTransitionTime":"2026-01-30T21:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.044804 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 02:22:19.118992922 +0000 UTC Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.055956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.056005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.056020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.056048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.056059 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.091758 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.091817 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.091846 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:40 crc kubenswrapper[4721]: E0130 21:17:40.091978 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.092027 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:40 crc kubenswrapper[4721]: E0130 21:17:40.092140 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:40 crc kubenswrapper[4721]: E0130 21:17:40.092266 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:40 crc kubenswrapper[4721]: E0130 21:17:40.093129 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.159052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.159224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.159253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.159279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.159332 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.262494 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.262558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.262574 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.262599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.262616 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.366270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.366352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.366369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.366391 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.366407 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.471547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.471643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.471663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.471688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.471705 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.575902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.576002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.576021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.576046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.576064 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.679821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.679866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.679878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.679897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.679909 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.784487 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.784569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.784594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.784625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.784646 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.886916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.886962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.886973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.886991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.887003 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.902257 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.903024 4721 scope.go:117] "RemoveContainer" containerID="4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.932531 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:40Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.952841 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:40Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.972427 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:40Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.988533 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:40Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.992345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.992696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.992730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.992764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:40 crc kubenswrapper[4721]: I0130 21:17:40.992782 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:40Z","lastTransitionTime":"2026-01-30T21:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.020261 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:30Z\\\",\\\"message\\\":\\\" \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0130 21:17:30.386652 6238 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}\\\\nI0130 21:17:30.386666 6238 services_controller.go:360] Finished syncing service machine-api-operator-machine-webhook on namespace openshift-machine-api for network=default : 3.006006ms\\\\nI0130 21:17:30.386678 6238 services_controller.go:356] Processing sync for service openshift-kube-apiserver-operator/metrics for network=default\\\\nF0130 21:17:30.386707 6238 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.037803 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.044929 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 23:14:42.57365766 +0000 UTC Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.062197 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.080987 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.096342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.096396 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.096416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.096438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.096457 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.103416 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.127137 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.151327 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.168749 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.184979 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.199730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.199790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.199808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.199833 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.199852 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.203727 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.219985 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.238206 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.272345 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.303602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.303741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.303772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.303815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.303842 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.407069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.407121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.407134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.407156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.407168 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.462845 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/1.log" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.465590 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.466103 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.482765 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 
21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.501424 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.509987 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.510023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.510031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.510043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.510053 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.516806 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.531511 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.540822 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.553416 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.565961 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.579006 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.597164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.613162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.613229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc 
kubenswrapper[4721]: I0130 21:17:41.613249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.613281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.613328 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.618918 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.634018 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.648515 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.664784 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.678154 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.708252 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c73
78a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:30Z\\\",\\\"message\\\":\\\" \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0130 21:17:30.386652 6238 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}\\\\nI0130 21:17:30.386666 6238 services_controller.go:360] Finished syncing service machine-api-operator-machine-webhook on namespace openshift-machine-api for network=default : 3.006006ms\\\\nI0130 21:17:30.386678 6238 services_controller.go:356] Processing sync for service openshift-kube-apiserver-operator/metrics for network=default\\\\nF0130 21:17:30.386707 6238 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.716706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.716761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.716775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.716798 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.716814 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.730095 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.747660 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.820007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.820081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.820101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.820127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.820146 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.922599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.922641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.922651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.922667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:41 crc kubenswrapper[4721]: I0130 21:17:41.922676 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:41Z","lastTransitionTime":"2026-01-30T21:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.025508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.025549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.025559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.025576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.025586 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.046008 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 12:46:17.676370317 +0000 UTC Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.091760 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.091784 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.091914 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.092179 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:42 crc kubenswrapper[4721]: E0130 21:17:42.092496 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:42 crc kubenswrapper[4721]: E0130 21:17:42.092764 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:42 crc kubenswrapper[4721]: E0130 21:17:42.092936 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:42 crc kubenswrapper[4721]: E0130 21:17:42.093147 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.117395 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contain
erID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.128358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.128411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.128431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.128458 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.128477 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.132087 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.148702 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.162135 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.188781 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.210334 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.229915 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.232786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.232863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.232889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.232922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.232945 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.250814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.264816 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.295730 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c73
78a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:30Z\\\",\\\"message\\\":\\\" \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0130 21:17:30.386652 6238 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}\\\\nI0130 21:17:30.386666 6238 services_controller.go:360] Finished syncing service machine-api-operator-machine-webhook on namespace openshift-machine-api for network=default : 3.006006ms\\\\nI0130 21:17:30.386678 6238 services_controller.go:356] Processing sync for service openshift-kube-apiserver-operator/metrics for network=default\\\\nF0130 21:17:30.386707 6238 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.313180 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.335670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.335728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.335747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.335775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.335795 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.337290 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.359836 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.376963 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 
21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.397203 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.413994 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.436191 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.438489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.438536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.438556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.438582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.438604 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.472860 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/2.log" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.473894 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/1.log" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.478482 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173" exitCode=1 Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.478565 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.478663 4721 scope.go:117] "RemoveContainer" containerID="4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.479753 4721 scope.go:117] "RemoveContainer" containerID="838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173" Jan 30 21:17:42 crc kubenswrapper[4721]: E0130 21:17:42.480343 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.501377 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.518463 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.541915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.541983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.542001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.542031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.542049 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.554366 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f1
1559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.576243 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.595018 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.607780 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.633938 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.645818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.645875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc 
kubenswrapper[4721]: I0130 21:17:42.645895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.645920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.645939 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.658191 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.678001 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.696687 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.715918 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.734454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.749330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.749429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.749465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.749503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.749525 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.767983 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e16de075f1d4123e4c5a9a88d05824df685f250c74118ab7d190eec13a42c47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:30Z\\\",\\\"message\\\":\\\" \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0130 21:17:30.386652 6238 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-machine-webhook\\\\\\\"}\\\\nI0130 21:17:30.386666 6238 services_controller.go:360] Finished syncing service machine-api-operator-machine-webhook on namespace openshift-machine-api for network=default : 3.006006ms\\\\nI0130 21:17:30.386678 6238 services_controller.go:356] Processing sync for service openshift-kube-apiserver-operator/metrics for network=default\\\\nF0130 21:17:30.386707 6238 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 
},NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.785233 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.805020 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.824637 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.843687 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:42Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.853327 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.853392 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.853409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.853434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.853458 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.956457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.956539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.956563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.956596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:42 crc kubenswrapper[4721]: I0130 21:17:42.956619 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:42Z","lastTransitionTime":"2026-01-30T21:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.046833 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 23:34:44.42926097 +0000 UTC Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.059442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.059502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.059520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.059548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.059568 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.162167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.162232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.162249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.162277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.162333 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.266528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.266585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.266609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.266641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.266667 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.370281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.370379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.370398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.370420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.370435 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.474048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.474136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.474169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.474202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.474225 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.485734 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/2.log" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.492120 4721 scope.go:117] "RemoveContainer" containerID="838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173" Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.492461 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.515613 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.538331 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.561573 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.577409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.577476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.577495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.577522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.577540 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.584895 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.604884 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.626507 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c73
78a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 },NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.641081 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.658591 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.674340 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.680017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.680081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.680101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.680126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.680145 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.688654 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.707739 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.722818 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.760947 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.783015 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.783063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.783079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.783102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.783122 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.783259 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.803931 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.819427 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.844450 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:43Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.885725 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.885775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc 
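The webhook failure above is a pure clock-versus-lifetime problem: the node-identity webhook's serving certificate expired on 2025-08-24, months before the current time the kubelet reports, so every TLS handshake to 127.0.0.1:9743 fails before the pod-status patch is even attempted. Go's x509 machinery enforces exactly the NotBefore/NotAfter window named in the error. A minimal standalone sketch of that check follows; the certificate path is a placeholder, since the log does not name the webhook's cert file.

// certcheck.go - standalone sketch: report whether a PEM certificate is
// currently inside its NotBefore/NotAfter validity window, mirroring the
// "certificate has expired or is not yet valid" failure in the log above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Placeholder path; substitute the webhook's actual serving cert.
	data, err := os.ReadFile("/path/to/serving-cert.pem")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}
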
kubenswrapper[4721]: I0130 21:17:43.885793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.885816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.885833 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.891069 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.891200 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.891255 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.891384 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.891550 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.891679 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:17:59.891655382 +0000 UTC m=+68.683556658 (durationBeforeRetry 16s). 
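Note the durationBeforeRetry progression in these mount errors: 16s for one failed operation, 32s the next time the same kind of failure recurs. The kubelet's volume manager retries failed mount/unmount operations with a per-operation exponential backoff, doubling the wait up to a cap rather than retrying immediately. Below is a minimal sketch of that doubling pattern; the 500ms base and 2m2s cap reflect my understanding of kubelet's nestedpendingoperations defaults and should be treated as assumptions.

// backoff.go - sketch of the doubling retry delay visible as
// "durationBeforeRetry 16s" followed by "durationBeforeRetry 32s".
package main

import (
	"fmt"
	"time"
)

// Assumed constants, not read from this log.
const (
	initialDelay = 500 * time.Millisecond
	maxDelay     = 2*time.Minute + 2*time.Second
)

// nextDelay doubles the previous delay and clamps it to maxDelay.
func nextDelay(prev time.Duration) time.Duration {
	if prev <= 0 {
		return initialDelay
	}
	d := prev * 2
	if d > maxDelay {
		d = maxDelay
	}
	return d
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 10; i++ {
		d = nextDelay(d)
		fmt.Printf("attempt %d: wait %s before retry\n", i+1, d)
	}
	// The sixth failure reaches 16s, the seventh 32s, then the cap.
}
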
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.891709 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:18:15.891696483 +0000 UTC m=+84.683597769 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.891860 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.892169 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.892194 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:18:15.892066315 +0000 UTC m=+84.683967761 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.892648 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:18:15.892613123 +0000 UTC m=+84.684514399 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.990058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.990444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.990609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.990786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.990924 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:43Z","lastTransitionTime":"2026-01-30T21:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.992903 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:43 crc kubenswrapper[4721]: I0130 21:17:43.992975 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993228 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993261 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993282 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993394 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
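The "object ... not registered" failures here are neither RBAC denials nor missing objects: the kubelet serves Secret and ConfigMap volume data from a local cache that only tracks objects a registered pod has declared an interest in, and during this startup window the pods haven't been (re)registered with that cache yet, so every lookup is refused and the mount goes back into backoff. The toy illustration below shows the shape of that gate; it is not kubelet's actual manager API.

// Toy sketch of a registration-gated object cache. Until a pod
// registers interest in "namespace/name", Get refuses the lookup with
// the same kind of error seen in the log.
package main

import (
	"fmt"
	"sync"
)

type objectCache struct {
	mu         sync.Mutex
	registered map[string]bool
}

func newObjectCache() *objectCache {
	return &objectCache{registered: make(map[string]bool)}
}

// RegisterPod records that some pod needs namespace/name.
func (c *objectCache) RegisterPod(ns, name string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.registered[ns+"/"+name] = true
}

// Get refuses lookups for objects no pod has registered.
func (c *objectCache) Get(ns, name string) error {
	c.mu.Lock()
	defer c.mu.Unlock()
	if !c.registered[ns+"/"+name] {
		return fmt.Errorf("object %q/%q not registered", ns, name)
	}
	return nil // a real cache would return the object here
}

func main() {
	c := newObjectCache()
	if err := c.Get("openshift-multus", "metrics-daemon-secret"); err != nil {
		fmt.Println(err) // object "openshift-multus"/"metrics-daemon-secret" not registered
	}
	c.RegisterPod("openshift-multus", "metrics-daemon-secret")
	fmt.Println(c.Get("openshift-multus", "metrics-daemon-secret")) // <nil>
}
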
No retries permitted until 2026-01-30 21:18:15.993371535 +0000 UTC m=+84.785272811 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993713 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993860 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.993989 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:43 crc kubenswrapper[4721]: E0130 21:17:43.994177 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:18:15.994149829 +0000 UTC m=+84.786051115 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.047432 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 12:02:26.047545393 +0000 UTC Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.092132 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:44 crc kubenswrapper[4721]: E0130 21:17:44.092858 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.092354 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:44 crc kubenswrapper[4721]: E0130 21:17:44.093286 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.092433 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.092166 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:44 crc kubenswrapper[4721]: E0130 21:17:44.094530 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:44 crc kubenswrapper[4721]: E0130 21:17:44.094778 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.096472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.096536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.096562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.096589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.096619 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
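All of the "No sandbox for pod can be found" and "Error syncing pod, skipping" entries trace back to a single condition: the runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, and the kubelet refuses to create sandboxes for non-host-network pods until that clears. The readiness test itself amounts to a directory probe; a sketch follows, where the matched extensions (.conf/.conflist/.json) are the conventional CNI config suffixes and should be read as an assumption rather than the exact runtime logic.

// cniready.go - sketch: does a CNI config directory contain any usable
// network configuration? This mirrors the condition behind
// "no CNI configuration file in /etc/kubernetes/cni/net.d/".
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // conventional CNI config extensions
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI dir:", err)
		return
	}
	if !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}
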
Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.200409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.200484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.200501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.200528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.200547 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.304250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.304352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.304374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.304405 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.304428 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.407657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.407735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.407759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.407791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.407814 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
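Each "Node became not ready" line is the kubelet rewriting the node's Ready condition during its periodic status sync: status False, reason KubeletNotReady, and the runtime's network error as the message. lastHeartbeatTime advances on every sync, while lastTransitionTime only moves when the status actually flips. The stdlib-only sketch below assembles the same condition object that setters.go prints; the field names follow the Kubernetes NodeCondition schema.

// readycond.go - sketch: assemble the Ready=False node condition that
// the kubelet logs while the container runtime network is not ready.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// nodeCondition mirrors the fields of Kubernetes' v1.NodeCondition.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	cond := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now, // bumped on every status sync
		LastTransitionTime: now, // only moves when Status flips
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	out, _ := json.Marshal(cond)
	fmt.Println(string(out))
}
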
Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.510065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.510126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.510143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.510165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.510180 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.613803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.613873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.613893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.613921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.613942 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.717397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.717483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.717503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.717530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.717547 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.820568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.820641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.820665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.820699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.820725 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.923794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.923857 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.923873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.923895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:44 crc kubenswrapper[4721]: I0130 21:17:44.923908 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:44Z","lastTransitionTime":"2026-01-30T21:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.047600 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 09:17:47.531430065 +0000 UTC Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.048595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.048641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.048654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.048679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.048694 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.151255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.151365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.151387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.151417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.151442 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
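The certificate_manager lines are unrelated to the CNI noise around them: the kubelet-serving certificate manager re-evaluates its rotation deadline on each pass, and the deadline is deliberately jittered to a random point late in the certificate's lifetime so a fleet of kubelets does not rotate simultaneously. That is why the logged deadline jumps between December and January dates while the expiration stays fixed at 2026-02-24. The sketch below uses a uniform pick in roughly the 70-90% band of the lifetime, which matches my reading of client-go's certificate manager but should be treated as an assumption.

// rotation.go - sketch: jittered certificate rotation deadline, picked
// uniformly in [70%, 90%] of the cert lifetime after NotBefore.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	// Assumed fractions: somewhere between 70% and 90% of the lifetime.
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
	notBefore := notAfter.Add(-365 * 24 * time.Hour)          // assumed issue time
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
	}
	// Each call lands on a different date well before expiration,
	// matching the shifting deadlines in the log.
}
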
Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.255024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.255466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.255649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.255812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.255956 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.360561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.360625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.360645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.360676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.360696 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.463850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.463907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.463926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.463954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.463975 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.567278 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.567370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.567389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.567415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.567435 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.669765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.670031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.670127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.670246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.670375 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.773365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.773418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.773429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.773450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.773462 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.876454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.876527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.876549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.876575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.876596 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.979678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.979742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.979760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.979783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:45 crc kubenswrapper[4721]: I0130 21:17:45.979800 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:45Z","lastTransitionTime":"2026-01-30T21:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.048956 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 05:29:26.822725125 +0000 UTC Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.081925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.081985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.082002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.082027 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.082047 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.091235 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.091312 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.091230 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:46 crc kubenswrapper[4721]: E0130 21:17:46.091464 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:46 crc kubenswrapper[4721]: E0130 21:17:46.091544 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:46 crc kubenswrapper[4721]: E0130 21:17:46.091623 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.091998 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:46 crc kubenswrapper[4721]: E0130 21:17:46.092131 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.185712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.185777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.185794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.185820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.185840 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.289149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.289222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.289240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.289278 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.289426 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.392148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.392568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.392706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.392851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.393022 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.496485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.496585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.496608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.497096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.497385 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.600114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.600545 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.600653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.600757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.600854 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.703132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.703560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.703717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.703880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.704030 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.807149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.807208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.807229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.807260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.807283 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.910133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.910191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.910207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.910231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:46 crc kubenswrapper[4721]: I0130 21:17:46.910251 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:46Z","lastTransitionTime":"2026-01-30T21:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.013048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.013106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.013125 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.013151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.013170 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.049761 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 08:19:47.490790448 +0000 UTC
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.116762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.117236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.117622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.117928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.118213 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.221076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.221138 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.221157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.221180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.221197 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.324293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.324381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.324402 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.324427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.324446 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.427596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.427922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.428071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.428226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.428415 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.531318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.531352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.531361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.531400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.531410 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.634774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.634827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.634862 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.634880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.634894 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.739462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.739511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.739523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.739542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.739555 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.842492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.842576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.842602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.842638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.842663 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.946071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.946146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.946164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.946194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:47 crc kubenswrapper[4721]: I0130 21:17:47.946217 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:47Z","lastTransitionTime":"2026-01-30T21:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.049704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.049756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.049775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.049799 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.050703 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
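[editor's note] Every setters.go entry above cites the same root cause: no CNI configuration file in /etc/kubernetes/cni/net.d/. A rough sketch of the check that message implies; the directory path is taken verbatim from the log message, while the accepted extensions follow the usual CNI convention (.conf, .conflist, .json) and are an assumption here, not something this log confirms:

#!/usr/bin/env python3
"""Check whether the CNI config directory named in the kubelet
message actually contains a network configuration. Triage aid
only, not a reimplementation of the runtime's logic."""
from pathlib import Path

CNI_DIR = Path("/etc/kubernetes/cni/net.d")  # path verbatim from the log message
EXTS = {".conf", ".conflist", ".json"}       # assumed: conventional CNI config extensions

confs = sorted(p for p in CNI_DIR.iterdir() if p.suffix in EXTS) if CNI_DIR.is_dir() else []
if not confs:
    print(f"{CNI_DIR}: no CNI configuration files -- consistent with NetworkPluginNotReady")
else:
    for p in confs:
        print(f"{CNI_DIR}: found {p.name}")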
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.051001 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 15:16:16.732398553 +0000 UTC
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.091362 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.091432 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.091538 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:17:48 crc kubenswrapper[4721]: E0130 21:17:48.091528 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640"
Jan 30 21:17:48 crc kubenswrapper[4721]: E0130 21:17:48.091698 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:17:48 crc kubenswrapper[4721]: E0130 21:17:48.091791 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.091983 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:17:48 crc kubenswrapper[4721]: E0130 21:17:48.092160 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.154615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.154702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.154721 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.154743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.154761 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.258031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.258100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.258122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.258147 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.258166 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
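[editor's note] The E0130 pod_workers.go entries above name the workloads the kubelet refuses to sync while the network is not ready. A small sketch that pulls the unique pod/podUID pairs out of entries in exactly that format, again assuming the excerpt is saved as kubelet.log (an assumed path, not part of this run):

#!/usr/bin/env python3
"""List pods blocked by "network is not ready", from kubelet
"Error syncing pod, skipping" entries like those shown above."""
import re

POD_RE = re.compile(
    r'"Error syncing pod, skipping" err="network is not ready[^"]*"'
    r' pod="(?P<pod>[^"]+)" podUID="(?P<uid>[^"]+)"'
)

seen = {}
with open("kubelet.log", encoding="utf-8", errors="replace") as fh:  # assumed path
    for line in fh:
        for m in POD_RE.finditer(line):
            seen[m.group("uid")] = m.group("pod")

# One line per affected pod, sorted by namespace/name.
for uid, pod in sorted(seen.items(), key=lambda kv: kv[1]):
    print(f"{pod}  uid={uid}")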
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.328782 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.342247 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.353730 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.361707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.361767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.361787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.361813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.361830 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.371673 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.394706 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.414433 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.431933 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.456833 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.465212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.465269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:48 crc 
kubenswrapper[4721]: I0130 21:17:48.465287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.465338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.465358 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.493060 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.523655 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.548127 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.567879 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.567919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.567932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.567949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.567960 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.569318 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.587564 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.621681 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c73
78a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 },NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.635720 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.654568 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.668383 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.670047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.670080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.670090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.670108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.670121 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.689740 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.704664 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:48Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.772747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.772796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.772813 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.772834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.772848 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.876481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.876552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.876575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.876601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.876619 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.979529 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.979641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.979669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.979699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:48 crc kubenswrapper[4721]: I0130 21:17:48.979721 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:48Z","lastTransitionTime":"2026-01-30T21:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.051560 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 14:00:02.677154306 +0000 UTC Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.082098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.082485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.082671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.082871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.083044 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.186013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.186066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.186084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.186106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.186124 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.271524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.271592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.271616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.271690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.271719 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: E0130 21:17:49.294259 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:49Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.299565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.299623 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.299642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.299664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.299682 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: E0130 21:17:49.320416 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:49Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.325265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.325358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
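The retry loop above is failing for a single reason: the serving certificate presented by the webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-30, so the TLS handshake for every status patch is rejected before the patch reaches the API server. The following is a minimal Python sketch of an out-of-band check that confirms which side is wrong, the clock or the certificate; it assumes the third-party cryptography package is installed and the port is reachable from the node. Host and port are taken from the log line; the script itself is illustrative and not part of any OpenShift tooling.

# check_webhook_cert.py: report the validity window of a TLS endpoint's
# certificate. Diagnostic sketch only; host/port mirror the log above.
import socket
import ssl
from datetime import datetime, timezone

from cryptography import x509  # third-party; the *_utc accessors need cryptography >= 42

HOST, PORT = "127.0.0.1", 9743  # webhook endpoint from the kubelet error

# Verification is disabled on purpose: an expired certificate would make a
# verifying handshake fail before we could read the certificate at all.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

with socket.create_connection((HOST, PORT), timeout=10) as sock:
    with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
        der = tls.getpeercert(binary_form=True)  # raw DER bytes

cert = x509.load_der_x509_certificate(der)
now = datetime.now(timezone.utc)
print("subject:   ", cert.subject.rfc4514_string())
print("not before:", cert.not_valid_before_utc)
print("not after: ", cert.not_valid_after_utc)
print("expired:   ", now > cert.not_valid_after_utc)

Run on this node, it would be expected to print "not after: 2025-08-24 17:21:41+00:00" and "expired: True", matching the x509 error in the log.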
event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.325398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.325429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.325447 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: E0130 21:17:49.345351 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:49Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.350485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.350636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.350656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.350680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.350700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: E0130 21:17:49.371421 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:49Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.377191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.377241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.377258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.377283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.377327 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: E0130 21:17:49.397115 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:49Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:49 crc kubenswrapper[4721]: E0130 21:17:49.397365 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.399431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
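Once the retry budget is exhausted (the kubelet retries a node status update five times per cycle), it logs the "exceeds retry count" error above and starts over on the next sync. Independently of the webhook failure, every Ready condition in this log carries the same underlying cause: there is no CNI configuration file in /etc/kubernetes/cni/net.d/, so the container runtime network never becomes ready. Below is a small Python sketch of a presence check for that directory; the path comes from the kubelet message, while the accepted file extensions are an assumption based on common CNI configuration naming rather than anything in this log.

# cni_check.py: report whether any CNI network configuration exists.
# Illustrative sketch only.
from pathlib import Path

NET_D = Path("/etc/kubernetes/cni/net.d")   # path from the kubelet message
CNI_EXTS = {".conf", ".conflist", ".json"}  # assumed config extensions

configs = []
if NET_D.is_dir():
    configs = sorted(p.name for p in NET_D.iterdir() if p.suffix in CNI_EXTS)

if configs:
    print("CNI configuration found:", ", ".join(configs))
else:
    # Mirrors the condition the kubelet keeps reporting: NetworkReady=false.
    print(f"no CNI configuration file in {NET_D}/; network plugin not ready")

On an OpenShift node the CNI configuration is normally written by the cluster network plugin once its pods start, so the two symptoms here are plausibly related rather than independent.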
event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.399483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.399502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.399528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.399550 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.502923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.503005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.503022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.503047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.503064 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.606148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.606180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.606188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.606201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.606209 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.709132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.709191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.709202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.709216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.709226 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.812070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.812119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.812136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.812159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.812176 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.915561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.915639 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.915657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.915683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:49 crc kubenswrapper[4721]: I0130 21:17:49.915700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:49Z","lastTransitionTime":"2026-01-30T21:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.019697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.019782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.019807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.019840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.019865 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.052100 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 03:42:51.692531821 +0000 UTC Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.092211 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.092239 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.092247 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.092348 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:50 crc kubenswrapper[4721]: E0130 21:17:50.092521 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:50 crc kubenswrapper[4721]: E0130 21:17:50.092607 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:50 crc kubenswrapper[4721]: E0130 21:17:50.092827 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:50 crc kubenswrapper[4721]: E0130 21:17:50.092969 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.122991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.123068 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.123093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.123125 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.123148 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.226001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.226050 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.226062 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.226080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.226093 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.329986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.330042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.330057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.330076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.330092 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.433644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.433699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.433717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.433740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.433757 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.537673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.537754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.537777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.537811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.537836 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.640409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.640471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.640488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.640515 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.640532 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.743724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.743800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.743817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.743843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.743863 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.847410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.847498 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.847519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.847548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.847566 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.950912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.950983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.951001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.951032 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:50 crc kubenswrapper[4721]: I0130 21:17:50.951051 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:50Z","lastTransitionTime":"2026-01-30T21:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.052543 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 09:24:10.414575169 +0000 UTC Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.055207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.055273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.055291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.055342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.055366 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.159128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.159191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.159207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.159239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.159254 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.262274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.262374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.262389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.262417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.262437 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.365757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.365829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.365848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.365872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.365889 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.468970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.469031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.469051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.469076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.469093 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.572546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.572616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.572635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.572661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.572686 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.675596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.675640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.675651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.675670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.675686 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.779051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.779132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.779151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.779178 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.779194 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.883247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.883334 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.883347 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.883372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.883389 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.987617 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.987685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.987715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.987750 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:51 crc kubenswrapper[4721]: I0130 21:17:51.987780 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:51Z","lastTransitionTime":"2026-01-30T21:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.053275 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 17:29:57.612041055 +0000 UTC Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.090826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.091086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.091198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.091314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.091424 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.091809 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.091848 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.092085 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.092463 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:52 crc kubenswrapper[4721]: E0130 21:17:52.092454 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:52 crc kubenswrapper[4721]: E0130 21:17:52.092700 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:52 crc kubenswrapper[4721]: E0130 21:17:52.092882 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:52 crc kubenswrapper[4721]: E0130 21:17:52.093040 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.111497 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.123543 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.193889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.193934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.193973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.193993 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.194004 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.194919 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f1
1559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.213392 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.225976 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.239503 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.254316 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.269861 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.282217 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.295503 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.296601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.296667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.296682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.296708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.296723 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.309269 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.325662 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.339174 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.361716 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 },NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.375201 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.389802 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.399728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.399776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.399791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.399825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.399842 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.409688 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.427732 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:52Z is after 2025-08-24T17:21:41Z" Jan 30 
21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.502922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.502979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.502999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.503022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.503042 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.606540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.606607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.606628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.606654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.606673 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.710373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.710980 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.711013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.711049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.711069 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.814923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.814971 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.814984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.815003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.815015 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.918845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.918902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.918917 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.918938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:52 crc kubenswrapper[4721]: I0130 21:17:52.918952 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:52Z","lastTransitionTime":"2026-01-30T21:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.021856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.021954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.021981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.022016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.022042 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.053875 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 14:59:54.398305948 +0000 UTC Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.125625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.125661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.125669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.125681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.125690 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.228043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.228110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.228126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.228153 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.228170 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.331585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.331640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.331659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.331683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.331701 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.435646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.435765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.435829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.435852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.435872 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.538339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.538388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.538406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.538430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.538535 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.641280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.641345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.641362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.641381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.641394 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.744029 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.744089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.744106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.744134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.744157 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.846866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.846919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.846940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.846965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.846984 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.950649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.950725 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.950744 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.950770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:53 crc kubenswrapper[4721]: I0130 21:17:53.950789 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:53Z","lastTransitionTime":"2026-01-30T21:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.053354 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.053400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.053411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.053429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.053440 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.054351 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 01:49:45.771283614 +0000 UTC Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.091934 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.091988 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.092068 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.091942 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:54 crc kubenswrapper[4721]: E0130 21:17:54.092117 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:54 crc kubenswrapper[4721]: E0130 21:17:54.092452 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:54 crc kubenswrapper[4721]: E0130 21:17:54.092559 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:54 crc kubenswrapper[4721]: E0130 21:17:54.092634 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.156810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.156897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.156929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.156962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.156983 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.260746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.260806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.260818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.260839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.260851 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.365160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.365229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.365244 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.365268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.365282 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.469342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.469403 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.469422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.469447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.469464 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.572223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.572281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.572316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.572336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.572349 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.676979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.677053 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.677076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.677106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.677130 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.780521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.780564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.780573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.780587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.780598 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.883073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.883109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.883119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.883134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.883142 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.984674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.984713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.984722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.984744 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:54 crc kubenswrapper[4721]: I0130 21:17:54.984753 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:54Z","lastTransitionTime":"2026-01-30T21:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.054950 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:52:10.224920466 +0000 UTC Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.087006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.087067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.087084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.087110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.087127 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.190191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.190268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.190292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.190354 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.190372 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.293166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.293198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.293207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.293221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.293230 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.406486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.406539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.406547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.406563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.406572 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.510377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.510451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.510471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.510496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.510514 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.613457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.613507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.613523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.613545 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.613562 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.716663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.716718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.716735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.716758 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.716775 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.820142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.820501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.820527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.820554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.820571 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.923002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.923065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.923083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.923109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:55 crc kubenswrapper[4721]: I0130 21:17:55.923128 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:55Z","lastTransitionTime":"2026-01-30T21:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.026710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.026855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.026947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.027014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.027036 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.055910 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 00:22:59.014121055 +0000 UTC Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.091422 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.091497 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:56 crc kubenswrapper[4721]: E0130 21:17:56.091610 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.091635 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:56 crc kubenswrapper[4721]: E0130 21:17:56.092243 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.092587 4721 scope.go:117] "RemoveContainer" containerID="838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173" Jan 30 21:17:56 crc kubenswrapper[4721]: E0130 21:17:56.092838 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:56 crc kubenswrapper[4721]: E0130 21:17:56.092966 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.093076 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:56 crc kubenswrapper[4721]: E0130 21:17:56.093189 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.130509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.130541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.130552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.130571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.130582 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.233352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.233397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.233413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.233436 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.233452 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.337050 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.337106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.337122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.337148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.337163 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.441031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.441110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.441123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.441141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.441173 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.545453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.545488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.545496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.545511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.545521 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.647704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.647774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.647792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.647818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.647837 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.750770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.750829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.750837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.750854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.750865 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.854133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.854187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.854197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.854213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.854224 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.957836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.957893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.957915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.957940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:56 crc kubenswrapper[4721]: I0130 21:17:56.957958 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:56Z","lastTransitionTime":"2026-01-30T21:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.056352 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 05:58:27.441888516 +0000 UTC Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.060876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.060954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.060984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.061061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.061090 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.164177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.164237 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.164255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.164281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.164323 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.266918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.267021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.267037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.267060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.267073 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.370513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.370573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.370587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.370612 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.370626 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.473209 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.473253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.473264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.473281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.473315 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.576403 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.576479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.576502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.576534 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.576556 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.679865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.679913 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.679924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.679942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.679953 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.783106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.783162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.783176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.783198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.783212 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.885484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.885515 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.885523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.885538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.885547 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.987759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.987787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.987795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.987808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:57 crc kubenswrapper[4721]: I0130 21:17:57.987815 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:57Z","lastTransitionTime":"2026-01-30T21:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.056903 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 01:39:55.347291206 +0000 UTC Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.090395 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.090426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.090435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.090448 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.090456 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.091622 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.091646 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.091700 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.091622 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:17:58 crc kubenswrapper[4721]: E0130 21:17:58.091760 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:17:58 crc kubenswrapper[4721]: E0130 21:17:58.091847 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:17:58 crc kubenswrapper[4721]: E0130 21:17:58.092039 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:17:58 crc kubenswrapper[4721]: E0130 21:17:58.092092 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.194346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.194398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.194407 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.194425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.194438 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.297572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.297620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.297634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.297649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.297661 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.400238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.400328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.400348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.400374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.400390 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.503059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.503122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.503138 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.503159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.503172 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.605984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.606035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.606045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.606066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.606076 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.708370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.708421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.708430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.708447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.708457 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.811480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.811525 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.811559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.811577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.811586 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.914743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.914814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.914834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.914863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:58 crc kubenswrapper[4721]: I0130 21:17:58.914880 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:58Z","lastTransitionTime":"2026-01-30T21:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.018223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.018279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.018292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.018340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.018351 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.057164 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 15:20:26.11003359 +0000 UTC Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.121258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.121344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.121363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.121386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.121399 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.224934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.225016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.225036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.225067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.225088 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.327900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.327996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.328014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.328043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.328066 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.430856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.430911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.430922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.430941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.430955 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.478601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.478655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.478674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.478698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.478715 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.500766 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:59Z is after 2025-08-24T17:21:41Z"
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.505805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.505851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
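
The patch rejection above is the key diagnostic in this section: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter, 2025-08-24T17:21:41Z, is months before the node clock of 2026-01-30T21:17:59Z, so TLS verification fails before the status update can land. The failing check reduces to a NotAfter comparison; a minimal sketch with both timestamps copied from the record:

package main

import (
        "fmt"
        "time"
)

func main() {
        // Both timestamps are taken from the webhook error above.
        notAfter, _ := time.Parse(time.RFC3339, "2025-08-24T17:21:41Z")
        now, _ := time.Parse(time.RFC3339, "2026-01-30T21:17:59Z")
        if now.After(notAfter) {
                fmt.Printf("x509: certificate has expired or is not yet valid: current time %s is after %s\n",
                        now.Format(time.RFC3339), notAfter.Format(time.RFC3339))
        }
}

On a live node the same dates could be confirmed by inspecting the certificate the webhook serves, for example with openssl s_client -connect 127.0.0.1:9743.
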
event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.505861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.505875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.505884 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.524925 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:59Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.531648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.531713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.531732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.531757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.531775 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.545540 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:59Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.550124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.550194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.550213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.550240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.550258 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.562646 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:59Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.566627 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.566712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.566740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.566770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.566791 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.580936 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:59Z is after 2025-08-24T17:21:41Z" Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.581113 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.582856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
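Root cause of the retry loop above: every node-status PATCH is intercepted by the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, and the TLS handshake fails because that webhook's serving certificate expired on 2025-08-24 while the node clock reads 2026-01-30; after five rejected attempts the kubelet logs "update node status exceeds retry count" and gives up until the next sync. Below is a minimal Go sketch (not kubelet code; the certificate path is a placeholder) of the same x509 validity check that produces "certificate has expired or is not yet valid".

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Placeholder path; the real webhook serving cert lives wherever
	// network-node-identity mounts its TLS secret.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	switch {
	case now.After(cert.NotAfter):
		// Same condition the TLS handshake reports in the log above.
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}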
event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.582891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.582899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.582915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.582926 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.685516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.685561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.685570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.685588 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.685599 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.788194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.788615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.788790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.789011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.789213 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.891847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.891914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.891923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.891937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.891946 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.980254 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.980410 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 21:17:59 crc kubenswrapper[4721]: E0130 21:17:59.980473 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:18:31.980457799 +0000 UTC m=+100.772359055 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered
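The MountVolume failure above is retried with exponential backoff: "durationBeforeRetry 32s" together with the "No retries permitted until 21:18:31" stamp shows the wait doubling after each failed attempt. A minimal Go sketch of that doubling-with-a-cap pattern follows; the 500ms seed and the roughly two-minute cap are assumptions consistent with the observed 32s step (0.5s doubled six times), not values printed in this log.

package main

import (
	"fmt"
	"time"
)

// backoff mirrors the doubling-with-a-cap retry wait suggested by the
// "durationBeforeRetry 32s" entry; seed and cap are assumed values.
type backoff struct {
	delay   time.Duration // next wait before a retry is permitted
	maxWait time.Duration // upper bound on the wait
}

func (b *backoff) next() time.Duration {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond // assumed initial wait
	} else {
		b.delay *= 2
		if b.delay > b.maxWait {
			b.delay = b.maxWait
		}
	}
	return b.delay
}

func main() {
	b := &backoff{maxWait: 2*time.Minute + 2*time.Second} // assumed cap
	for attempt := 1; attempt <= 9; attempt++ {
		fmt.Printf("failure %d: no retries permitted for %v\n", attempt, b.next())
	}
	// failure 7 prints 32s, matching the MountVolume entry in this log.
}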
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.994523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.994562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.994574 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.994594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:17:59 crc kubenswrapper[4721]: I0130 21:17:59.994605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:17:59Z","lastTransitionTime":"2026-01-30T21:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.057489 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 11:30:03.79894336 +0000 UTC Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.091274 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.091421 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:00 crc kubenswrapper[4721]: E0130 21:18:00.091475 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.091492 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:00 crc kubenswrapper[4721]: E0130 21:18:00.091588 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:00 crc kubenswrapper[4721]: E0130 21:18:00.091788 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.091961 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:00 crc kubenswrapper[4721]: E0130 21:18:00.092067 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.096476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.096504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.096519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.096537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.096551 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.199031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.199092 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.199109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.199133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.199152 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.301774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.301806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.301814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.301827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.301836 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.404529 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.404614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.404646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.404682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.404710 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.507411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.507463 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.507480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.507502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.507518 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.609274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.609332 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.609343 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.609362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.609371 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.711759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.712082 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.712155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.712231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.712286 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.814603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.814672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.814686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.814705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.814717 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.917822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.917864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.917873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.917892 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:00 crc kubenswrapper[4721]: I0130 21:18:00.917903 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:00Z","lastTransitionTime":"2026-01-30T21:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.020089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.020126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.020135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.020150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.020162 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.058539 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 00:39:53.278327043 +0000 UTC Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.122280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.122368 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.122388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.122410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.122427 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.224381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.224423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.224432 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.224443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.224452 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.326818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.326863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.326878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.326900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.326911 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.429451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.429478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.429486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.429499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.429509 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.531511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.531557 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.531570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.531588 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.531599 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.633983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.634018 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.634027 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.634042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.634052 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.736118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.736156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.736168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.736182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.736193 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.838208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.838254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.838265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.838281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.838291 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.940001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.940054 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.940066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.940079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:01 crc kubenswrapper[4721]: I0130 21:18:01.940124 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:01Z","lastTransitionTime":"2026-01-30T21:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.042004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.042041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.042049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.042063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.042072 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.059341 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 12:20:30.228543329 +0000 UTC Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.091967 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.092013 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.092064 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.092064 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:02 crc kubenswrapper[4721]: E0130 21:18:02.092181 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:02 crc kubenswrapper[4721]: E0130 21:18:02.092246 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:02 crc kubenswrapper[4721]: E0130 21:18:02.092338 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:02 crc kubenswrapper[4721]: E0130 21:18:02.093248 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.104318 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.116565 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.133211 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.146503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.146541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.146552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.146531 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.146567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.146579 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.156794 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.170945 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.186864 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.205599 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.219456 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.231413 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.243917 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.250398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.250444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.250453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.250469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.250479 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.257016 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.268698 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.291435 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c73
78a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 },NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.303421 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.317628 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.332133 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.346054 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.352639 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.352688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.352697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.352715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.352724 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.454340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.454387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.454400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.454416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.454429 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.556503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.556536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.556543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.556556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.556564 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.571082 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/0.log"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.571117 4721 generic.go:334] "Generic (PLEG): container finished" podID="62d4c2ec-791a-4f32-8ba0-118cac4e72e5" containerID="99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055" exitCode=1
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.571144 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerDied","Data":"99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055"}
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.571452 4721 scope.go:117] "RemoveContainer" containerID="99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.584147 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.593883 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.611022 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.622954 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.633379 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.643226 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.658455 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.658925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.658956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc 
kubenswrapper[4721]: I0130 21:18:02.658967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.658981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.658991 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.669994 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.681442 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.692347 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.704814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.716062 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.729488 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to /host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.740129 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.757383 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 },NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.761213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.761248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.761257 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.761271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.761279 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.767737 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.778506 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.791475 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:02Z is after 2025-08-24T17:21:41Z" Jan 30 
21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.863489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.863527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.863536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.863550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.863559 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.965191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.965375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.965497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.965615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:02 crc kubenswrapper[4721]: I0130 21:18:02.965721 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:02Z","lastTransitionTime":"2026-01-30T21:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.059938 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 21:59:11.876327707 +0000 UTC Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.067521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.067552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.067561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.067574 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.067584 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.169509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.169783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.169792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.169806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.169815 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.271491 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.271527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.271535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.271547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.271556 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.374287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.374358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.374369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.374383 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.374393 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.476844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.476888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.476899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.476919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.476931 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.575974 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/0.log" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.576047 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerStarted","Data":"0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.578828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.578869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.578887 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.578907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.578924 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.596828 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.611004 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 
21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.628332 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.640625 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.651252 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.668861 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.679661 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.681157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.681221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.681233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.681285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.681885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.694759 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.703532 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.714821 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.729003 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.739331 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.754516 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.771997 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to /host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.783526 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.784609 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.784753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.784954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.785048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.785157 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.809098 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c73
78a06d6777a7404bd5d5c173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 },NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.822658 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.840700 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:03Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.887790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.887878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.887938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.888022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.888111 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.990420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.990465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.990480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.990497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:03 crc kubenswrapper[4721]: I0130 21:18:03.990509 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:03Z","lastTransitionTime":"2026-01-30T21:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.060767 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 08:22:53.485590994 +0000 UTC
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.091194 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:04 crc kubenswrapper[4721]: E0130 21:18:04.091317 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.091421 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.091480 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.091503 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:04 crc kubenswrapper[4721]: E0130 21:18:04.091558 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:18:04 crc kubenswrapper[4721]: E0130 21:18:04.091648 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:18:04 crc kubenswrapper[4721]: E0130 21:18:04.091733 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.092792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.092815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.092826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.092841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.092855 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.195871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.196117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.196356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.196526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.196665 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.299885 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.300246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.300418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.300563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.300750 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.404882 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.404960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.404982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.405005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.405021 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.507582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.507622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.507633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.507648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.507659 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.609813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.609876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.609892 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.609918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.609936 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.712051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.712088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.712096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.712109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.712118 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.815261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.815330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.815343 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.815363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.815377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.917926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.917977 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.918003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.918031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:04 crc kubenswrapper[4721]: I0130 21:18:04.918049 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:04Z","lastTransitionTime":"2026-01-30T21:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.020914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.020972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.020990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.021014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.021032 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.063486 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 09:55:01.892061177 +0000 UTC
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.123189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.123232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.123242 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.123256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.123266 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.225165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.225233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.225251 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.225277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.225293 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.327912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.327957 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.327966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.327980 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.327990 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.431231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.431269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.431278 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.431309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.431320 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.533536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.533622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.533647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.533679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.533702 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.637374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.637428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.637440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.637459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.637470 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.739999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.740056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.740074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.740098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.740115 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.842893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.842943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.842958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.842981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.842999 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.945108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.945154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.945165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.945180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:05 crc kubenswrapper[4721]: I0130 21:18:05.945190 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:05Z","lastTransitionTime":"2026-01-30T21:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.047018 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.047076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.047092 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.047117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.047133 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.064340 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 02:01:50.744007633 +0000 UTC
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.092519 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.092582 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:06 crc kubenswrapper[4721]: E0130 21:18:06.092630 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.092805 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:06 crc kubenswrapper[4721]: E0130 21:18:06.092794 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:06 crc kubenswrapper[4721]: E0130 21:18:06.092872 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.093235 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:06 crc kubenswrapper[4721]: E0130 21:18:06.093402 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.149220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.149254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.149262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.149275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.149283 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.251896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.251937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.251947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.251960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.251973 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.354978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.355035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.355047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.355065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.355077 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.457569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.457616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.457638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.457663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.457686 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.560269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.560376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.560398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.560421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.560439 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.663289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.663372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.663392 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.663417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.663434 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.765508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.765580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.765601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.765628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.765646 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.868270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.868385 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.868408 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.868444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.868466 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.970772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.970817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.970825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.970839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:06 crc kubenswrapper[4721]: I0130 21:18:06.970848 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:06Z","lastTransitionTime":"2026-01-30T21:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.066453 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 05:28:00.050852405 +0000 UTC Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.073935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.073990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.074005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.074029 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.074043 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.177001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.177061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.177079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.177101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.177122 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.280082 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.280159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.280185 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.280217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.280236 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.383422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.383510 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.383531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.383566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.383587 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.487356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.487826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.487997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.488205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.488421 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.590567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.590634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.590652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.590679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.590701 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.694675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.694739 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.694757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.694785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.694810 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.797572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.797624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.797635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.797652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.797662 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.901901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.901992 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.902012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.902040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:07 crc kubenswrapper[4721]: I0130 21:18:07.902060 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:07Z","lastTransitionTime":"2026-01-30T21:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.006291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.006406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.006433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.006462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.006486 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.067372 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 05:04:45.434134658 +0000 UTC Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.091905 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.092021 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.091952 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.092148 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:08 crc kubenswrapper[4721]: E0130 21:18:08.092133 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:08 crc kubenswrapper[4721]: E0130 21:18:08.092329 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:08 crc kubenswrapper[4721]: E0130 21:18:08.092393 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:08 crc kubenswrapper[4721]: E0130 21:18:08.092785 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.093179 4721 scope.go:117] "RemoveContainer" containerID="838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.108850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.108910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.108921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.108942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.108957 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.212044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.212115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.212134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.212164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.212183 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.315972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.316061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.316084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.316121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.316145 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.420131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.420206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.420227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.420258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.420284 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.524428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.524505 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.524520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.524542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.524752 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.604031 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/2.log" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.609783 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.610666 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.629678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.629745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.629772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.629803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.629822 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.633474 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.660147 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.683672 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.703603 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to /host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.723266 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.733377 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.733468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.733496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.733528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.733552 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.759625 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a6
3e0bd16d216c12213dec48f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 
},NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.783463 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.814899 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.836719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.836777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.836791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.836813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.836829 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.847647 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.865118 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 
21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.882031 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.895377 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.908910 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.930047 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.939150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.939191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.939206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.939222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.939234 4721 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:08Z","lastTransitionTime":"2026-01-30T21:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.944971 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.960328 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.970974 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:08 crc kubenswrapper[4721]: I0130 21:18:08.990134 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:08Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.041862 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.042120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc 
kubenswrapper[4721]: I0130 21:18:09.042183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.042268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.042352 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.069607 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 09:22:01.498542999 +0000 UTC
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.145587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.145958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.146103 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.146290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.146488 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.250275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.250354 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.250372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.250396 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.250415 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.353773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.353821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.353840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.353864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.353882 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.456521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.456583 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.456601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.456625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.456643 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.558815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.558858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.558867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.558881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.558894 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.616482 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/3.log"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.617720 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/2.log"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.623622 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4" exitCode=1
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.623677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4"}
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.623737 4721 scope.go:117] "RemoveContainer" containerID="838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.625289 4721 scope.go:117] "RemoveContainer" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4"
Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.627227 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7"
Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.642605 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.660911 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 
21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.662059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.662271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.662450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.662582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.662739 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.677271 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.687780 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.699486 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.714866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.714906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.714916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.714950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.714963 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.726569 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 
2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.730581 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026
-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.731258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.731353 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.731377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.731406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.731438 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.742837 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.750521 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"d
ce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.753963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.754043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.754063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.754094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.754114 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.757739 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.769951 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.770205 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"d
ce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.774392 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.774424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.774437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.774457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.774468 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.786773 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.792889 4721 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329b
a568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.798026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.798018 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.798094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.798116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.798145 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.798165 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.819868 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.820010 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"d
ce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: E0130 21:18:09.820288 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.822958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.822996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.823013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.823035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.823052 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.854750 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.878529 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to /host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.889832 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.915132 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://838f5215b6db7292ba3013fa13fa435fe41b8c7378a06d6777a7404bd5d5c173\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:17:41Z\\\",\\\"message\\\":\\\"ndler {0x1fcbb40 0x1fcb820 0x1fcb7c0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:17:41Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:17:41.873259 6367 services_controller.go:434] Service openshift-etcd/etcd retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{etcd openshift-etcd ad0a4b9d-2a7b-4f3f-9020-0c45d515459d 4800 0 2025-02-23 05:11:51 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:etcd] map[operator.openshift.io/spec-hash:0685cfaa0976bfb7ba58513629369c20bf05f4fba36949e982bdb43af328f0e1 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:etcd,Protocol:TCP,Port:2379,TargetPort:{0 2379 
},NodePort:0,AppProtocol:nil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0130 21:18:09.271501 6773 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0130 21:18:09.271130 6773 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc\\\\nI0130 21:18:09.271526 6773 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF0130 21:18:09.271520 6773 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z]\\\\nI0130 
21:18:09.271\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:18:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd
47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.925085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.925126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.925134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.925148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.925158 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:09Z","lastTransitionTime":"2026-01-30T21:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.926695 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:09 crc kubenswrapper[4721]: I0130 21:18:09.938192 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.027096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.027143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.027154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.027170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.027180 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.070660 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 10:04:03.495997238 +0000 UTC Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.092213 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.092261 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.092282 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:10 crc kubenswrapper[4721]: E0130 21:18:10.092355 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.092364 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:18:10 crc kubenswrapper[4721]: E0130 21:18:10.092443 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:18:10 crc kubenswrapper[4721]: E0130 21:18:10.092572 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:18:10 crc kubenswrapper[4721]: E0130 21:18:10.092625 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.129453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.129484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.129493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.129503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.129512 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
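The NetworkReady=false condition repeated throughout comes down to a directory probe: the runtime looks for a CNI network configuration under /etc/kubernetes/cni/net.d/ and finds nothing, so pod sandbox creation is skipped. A rough sketch of that probe, simplified relative to the real CRI-O/libcni logic (which also parses and validates the files it finds):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file. This only mirrors the "is anything there at all"
// part of the real check.
func hasCNIConfig(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d/"
	if !hasCNIConfig(dir) {
		fmt.Printf("no CNI configuration file in %s. Has your network provider started?\n", dir)
	}
}

Here the directory stays empty because ovnkube-controller (the network provider) is itself crash-looping, as the CrashLoopBackOff entry further down shows.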
Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.231607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.231656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.231666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.231681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.231693 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.333627 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.333712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.333734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.333757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.333776 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.437003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.437063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.437080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.437107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.437124 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.540773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.540865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.540900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.540933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.541074 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.627798 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/3.log" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.631378 4721 scope.go:117] "RemoveContainer" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4" Jan 30 21:18:10 crc kubenswrapper[4721]: E0130 21:18:10.631628 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.642882 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.642918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.642928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.642943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.642953 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.648730 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.659641 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.678278 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.696371 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeec
f647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.712361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.723701 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.732860 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.742526 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.745108 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.745137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.745148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.745164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.745175 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.759844 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a6
3e0bd16d216c12213dec48f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0130 21:18:09.271501 6773 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0130 21:18:09.271130 6773 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc\\\\nI0130 21:18:09.271526 6773 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF0130 21:18:09.271520 6773 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:18:09.271\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:18:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.773290 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.790398 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.808153 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.824688 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.837625 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.847198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.847275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.847328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.847366 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.847390 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.851442 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to /host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.867032 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.882717 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.894990 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:10Z is after 2025-08-24T17:21:41Z"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.950500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.950553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.950572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.950593 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:10 crc kubenswrapper[4721]: I0130 21:18:10.950605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:10Z","lastTransitionTime":"2026-01-30T21:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.053006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.053052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.053064 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.053081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.053093 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.071584 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 02:53:56.4406999 +0000 UTC
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.156168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.156224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.156236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.156255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.156269 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.259633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.259710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.259735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.259765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.259789 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.362459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.362520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.362537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.362563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.362582 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.464985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.465034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.465042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.465058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.465067 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.567894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.567966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.567990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.568017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.568034 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.670568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.670607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.670615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.670628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.670638 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.773248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.773370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.773395 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.773427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.773450 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.876227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.876330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.876356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.876390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.876412 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.979246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.979330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.979350 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.979373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:11 crc kubenswrapper[4721]: I0130 21:18:11.979391 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:11Z","lastTransitionTime":"2026-01-30T21:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.072059 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 05:11:45.433819596 +0000 UTC Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.082098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.082145 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.082154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.082167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.082177 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.091455 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.091479 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.091482 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:12 crc kubenswrapper[4721]: E0130 21:18:12.091541 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.091718 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:12 crc kubenswrapper[4721]: E0130 21:18:12.091726 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:12 crc kubenswrapper[4721]: E0130 21:18:12.091851 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:12 crc kubenswrapper[4721]: E0130 21:18:12.091931 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.117188 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contain
erID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.130695 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.145721 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.158072 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.177219 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.183897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.183942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc 
kubenswrapper[4721]: I0130 21:18:12.183951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.183965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.183973 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.187688 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.202066 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.213361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.225925 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.236900 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.248630 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to /host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.259190 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.279379 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0130 21:18:09.271501 6773 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0130 21:18:09.271130 6773 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc\\\\nI0130 21:18:09.271526 6773 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF0130 21:18:09.271520 6773 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:18:09.271\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:18:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.286093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.286121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.286132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.286145 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.286154 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.291856 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.306477 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.317422 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 
21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.329877 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.340033 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:12Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.388344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.388394 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.388411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.388434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.388455 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.491951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.492010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.492035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.492064 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.492087 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.595708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.595764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.595777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.595797 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.595810 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.699204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.699276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.699337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.699369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.699387 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.802952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.803060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.803080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.803106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.803122 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.906521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.906572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.906588 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.906615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:12 crc kubenswrapper[4721]: I0130 21:18:12.906631 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:12Z","lastTransitionTime":"2026-01-30T21:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.009363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.009422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.009439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.009461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.009477 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.073283 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 05:39:48.106810988 +0000 UTC Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.114083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.114168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.114187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.114731 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.114794 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.217835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.217906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.217927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.217954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.217974 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.320450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.320517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.320542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.320570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.320895 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.423790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.423828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.423839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.423857 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.423870 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.526505 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.526553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.526564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.526578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.526591 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.633556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.633620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.633653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.633682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.633704 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.735883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.735936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.735952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.735976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.735993 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.838468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.838532 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.838550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.838575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.838613 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.941222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.941281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.941331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.941359 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:13 crc kubenswrapper[4721]: I0130 21:18:13.941377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:13Z","lastTransitionTime":"2026-01-30T21:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.044736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.044837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.044858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.044882 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.044899 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.073991 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 20:02:46.980184406 +0000 UTC Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.091649 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:14 crc kubenswrapper[4721]: E0130 21:18:14.091853 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.092152 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:14 crc kubenswrapper[4721]: E0130 21:18:14.092254 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.092500 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.092534 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:14 crc kubenswrapper[4721]: E0130 21:18:14.092679 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:14 crc kubenswrapper[4721]: E0130 21:18:14.092893 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.148323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.148387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.148404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.148427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.148444 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.252843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.252890 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.252903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.252966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.252978 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.356514 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.356632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.356695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.356719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.356777 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.459498 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.459546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.459564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.459586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.459626 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.562927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.563127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.563155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.563190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.563215 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.666081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.666254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.666284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.666345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.666375 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.769417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.769476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.769497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.769533 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.769557 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.872555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.872625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.872643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.872672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.872692 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.974630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.974670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.974683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.974698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:14 crc kubenswrapper[4721]: I0130 21:18:14.974707 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:14Z","lastTransitionTime":"2026-01-30T21:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.074265 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 11:04:27.339656978 +0000 UTC Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.077288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.077358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.077371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.077388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.077403 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.180551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.180597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.180614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.180637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.180654 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.283452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.283501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.283511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.283526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.283535 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.386608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.386654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.386667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.386686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.386700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.489423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.489477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.489490 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.489508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.489521 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.591791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.591839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.591849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.591864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.591874 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.694652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.694696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.694705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.694719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.694729 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.797206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.797255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.797270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.797290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.797321 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.898981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.899030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.899038 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.899054 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.899065 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:15Z","lastTransitionTime":"2026-01-30T21:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.946036 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.946242 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:15 crc kubenswrapper[4721]: E0130 21:18:15.946343 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-30 21:19:19.946268693 +0000 UTC m=+148.738170159 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:18:15 crc kubenswrapper[4721]: E0130 21:18:15.946409 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:18:15 crc kubenswrapper[4721]: I0130 21:18:15.946426 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:15 crc kubenswrapper[4721]: E0130 21:18:15.946488 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.946464589 +0000 UTC m=+148.738365875 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 21:18:15 crc kubenswrapper[4721]: E0130 21:18:15.946617 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:18:15 crc kubenswrapper[4721]: E0130 21:18:15.946716 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.946693866 +0000 UTC m=+148.738595302 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.001790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.001851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.001868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.001892 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.001911 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.047555 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.047595 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047709 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047725 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047736 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047784 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2026-01-30 21:19:20.04777022 +0000 UTC m=+148.839671466 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047824 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047881 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.047901 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.048003 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.047970967 +0000 UTC m=+148.839872243 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.074754 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 10:38:20.307075912 +0000 UTC Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.092263 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.092283 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.092440 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.092625 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.092653 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.092871 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.093035 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:16 crc kubenswrapper[4721]: E0130 21:18:16.093202 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.104609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.104662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.104679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.104701 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.104719 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.207956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.208029 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.208048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.208074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.208100 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.311225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.311338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.311367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.311399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.311421 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.414854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.414928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.414958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.414991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.415013 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.517619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.517698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.517727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.517759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.517785 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.621763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.621822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.621839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.621861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.621880 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.725000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.725048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.725064 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.725108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.725126 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.827524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.827573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.827586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.827604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.827625 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.930876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.930951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.930978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.931009 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:16 crc kubenswrapper[4721]: I0130 21:18:16.931030 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:16Z","lastTransitionTime":"2026-01-30T21:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.034179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.034227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.034240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.034266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.034283 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.075797 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 02:43:06.271888943 +0000 UTC Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.137074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.137149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.137168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.137190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.137208 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.240143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.240201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.240221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.240245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.240264 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.342857 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.342895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.342908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.342924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.342937 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.446762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.446849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.446870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.446904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.446928 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.549717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.549785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.549808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.549839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.549863 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.653693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.653835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.654163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.654206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.654225 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.756895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.756949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.756967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.756995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.757017 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.864911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.864970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.864986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.865009 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.865026 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.967458 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.967518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.967537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.967563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:17 crc kubenswrapper[4721]: I0130 21:18:17.967580 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:17Z","lastTransitionTime":"2026-01-30T21:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.070439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.070502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.070520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.070544 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.070562 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.076633 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 14:18:48.214631426 +0000 UTC Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.091989 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.092065 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.091991 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:18 crc kubenswrapper[4721]: E0130 21:18:18.092251 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.092347 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:18 crc kubenswrapper[4721]: E0130 21:18:18.092358 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:18 crc kubenswrapper[4721]: E0130 21:18:18.092567 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:18 crc kubenswrapper[4721]: E0130 21:18:18.092736 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.109393 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.174096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.174142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.174158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.174181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.174205 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.276259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.276292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.276323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.276340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.276350 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.379437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.379478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.379488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.379503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.379514 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.482604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.482650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.482658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.482672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.482682 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.585790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.585851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.585880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.585908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.585925 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.688692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.688752 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.688769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.688792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.688811 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.792501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.792560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.792584 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.792614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.792638 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.895751 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.895795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.895813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.895835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.895852 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.999139 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.999193 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.999214 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.999241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:18 crc kubenswrapper[4721]: I0130 21:18:18.999261 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:18Z","lastTransitionTime":"2026-01-30T21:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.077425 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 21:04:16.890027244 +0000 UTC Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.102248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.102382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.102409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.102435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.102455 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.206625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.206727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.206751 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.206785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.206805 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.311480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.311516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.311527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.311540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.311549 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.415406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.415481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.415500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.415528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.415546 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.519527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.519571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.519581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.519600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.519614 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.622364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.622433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.622452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.622480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.622502 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.728475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.728545 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.728563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.728594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.728613 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.832232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.832294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.832335 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.832360 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.832378 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.861702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.861764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.861789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.861816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.861837 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: E0130 21:18:19.883517 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.889427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.889483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.889501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.889532 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.889551 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: E0130 21:18:19.910212 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.915746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.915864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.915893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.915925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.915947 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: E0130 21:18:19.940359 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.945859 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.945907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
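Every failed patch in this span ends with the same root cause: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-30. Below is a minimal stand-alone Go sketch for confirming that from the node; it is a diagnostic illustration, not kubelet code, and the address is simply copied from the log line. Verification is skipped deliberately so the expired certificate can still be read.

package main

// certcheck: dial the webhook endpoint named in the log above and print the
// serving certificate's validity window. InsecureSkipVerify is intentional:
// normal verification fails, and that failure is exactly the symptom here.

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()
	// Print NotBefore/NotAfter for each certificate the server presented.
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject.CommonName,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			time.Now().After(cert.NotAfter))
	}
}

Run against the endpoint in this state, the leaf certificate would print expired=true, matching the x509 error on every retry above and below.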
event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.945924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.945950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.945967 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: E0130 21:18:19.967135 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.973266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.973364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
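Beyond the certificate failure, the striking feature of this capture is repetition: the same node-status events and the same NotReady condition recur on every sync pass, and the oversized patch payload repeats verbatim on every retry. When triaging a log like this it helps to collapse it into distinct messages with repeat counts. The stand-alone sketch below does that for input on stdin; the prefix-stripping regex is an assumption matched to the syslog-plus-klog header format of these lines, not part of any kubelet tooling.

package main

// logsummary: strip the per-line timestamp headers so identical messages
// hash to the same key, then print each distinct message once with a count.

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// Matches e.g. "Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.915746 4721 "
	header := regexp.MustCompile(`^\w{3} +\d+ [\d:]+ \S+ \S+\[\d+\]: [IEW]\d{4} [\d:.]+ +\d+ `)
	counts := map[string]int{}
	var order []string
	sc := bufio.NewScanner(os.Stdin)
	// The status-patch lines far exceed bufio.Scanner's 64 KiB default.
	sc.Buffer(make([]byte, 0, 1024*1024), 16*1024*1024)
	for sc.Scan() {
		key := header.ReplaceAllString(sc.Text(), "")
		if counts[key] == 0 {
			order = append(order, key)
		}
		counts[key]++
	}
	for _, k := range order {
		n := counts[k]
		msg := k
		if len(msg) > 120 {
			msg = msg[:120] + "..." // keep the summary readable
		}
		fmt.Printf("%6d  %s\n", n, msg)
	}
}

Fed the decompressed log (for example, zcat kubelet.log.gz | go run logsummary.go), it reduces spans like this one to a handful of lines: the recurring event and condition messages plus the webhook patch failure, each with its repeat count.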
event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.973382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.973776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.974154 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:19 crc kubenswrapper[4721]: E0130 21:18:19.995900 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:19Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:19 crc kubenswrapper[4721]: E0130 21:18:19.996116 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.998158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.998213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.998231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.998255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:19 crc kubenswrapper[4721]: I0130 21:18:19.998279 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:19Z","lastTransitionTime":"2026-01-30T21:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.077871 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 19:28:39.799921171 +0000 UTC Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.091622 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.091660 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.091757 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:20 crc kubenswrapper[4721]: E0130 21:18:20.091921 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.091951 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:20 crc kubenswrapper[4721]: E0130 21:18:20.092149 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:20 crc kubenswrapper[4721]: E0130 21:18:20.092138 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:20 crc kubenswrapper[4721]: E0130 21:18:20.092256 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.100754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.100808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.100831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.100859 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.100881 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.203781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.203826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.203842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.203864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.203881 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.306634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.306693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.306711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.306736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.306755 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.409210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.409274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.409292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.409345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.409367 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.513351 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.513419 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.513437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.513463 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.513485 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.617657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.617739 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.617759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.617783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.617833 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.721813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.721889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.721907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.721933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.721950 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.826030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.826197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.826232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.826356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.826391 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.930741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.930810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.930829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.930904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:20 crc kubenswrapper[4721]: I0130 21:18:20.930924 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:20Z","lastTransitionTime":"2026-01-30T21:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.033848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.033884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.033893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.033908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.033917 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.079028 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 19:13:01.340857514 +0000 UTC
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.136971 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.137027 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.137044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.137071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.137087 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.240606 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.240660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.240677 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.240700 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.240717 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.343258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.343310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.343318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.343333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.343343 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.446785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.446865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.446887 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.446916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.446939 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.549760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.549822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.549846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.549878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.549901 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.653447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.653645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.653744 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.653771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.653788 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.756601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.756660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.756682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.756707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.756726 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.859427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.859540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.859610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.859647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.859701 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.962821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.962878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.962895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.962923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:21 crc kubenswrapper[4721]: I0130 21:18:21.962942 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:21Z","lastTransitionTime":"2026-01-30T21:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.066668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.066740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.066762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.066790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.066812 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.080132 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 22:38:55.668979038 +0000 UTC
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.091567 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.091705 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:22 crc kubenswrapper[4721]: E0130 21:18:22.092017 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.092069 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.092076 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:22 crc kubenswrapper[4721]: E0130 21:18:22.092234 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:18:22 crc kubenswrapper[4721]: E0130 21:18:22.092513 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:18:22 crc kubenswrapper[4721]: E0130 21:18:22.092724 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.115647 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1295f75-1b30-4453-af52-c3e8bb8a81b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f4c3c8ac814185256c7dfef1d0437fffa3f21aef2b3e89f74721cccb9354ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afcfb9c9ecd3a3a926eee6331337f3fe1eaa209dc4f162dc06cbde8cd8287a4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://09cf11214233c7b57a934a2227a389846b07b87e270556aca08c71b32e7b82a7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.137227 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b87a35474fde72727fab111f12fa736688763f529caea469f33e437a0877cfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c51331e179eea04c5d8bad675b9414928dafcc6f6f269f0b409e0413176ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.156694 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3605450b-d454-45c7-a954-b6eaac208991\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b822772fcc2456623f8bdf181a9afea0269e2c3fa1afc510d6fa074941e975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a66da514fa201b142d5545ecb4c9305250aa2a8fe594779c48c997d097783e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2rcxt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sr2j9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 
21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.172277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.172365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.172388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.172417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.172439 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.180165 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.198636 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pmc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"498f7a03-bce3-4988-b1c7-67fb7feb7700\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b1ca677c6cd15368e25d750d4857dc35b4b7b16ae084eed89c4a014de3808c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8mpn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pmc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.215891 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bc3642c-006f-483e-a590-7acb26d18f76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2aeb4c8eb7a6438319ea7064767a1d12873fb564a054b889148fdaa48288919a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5af3cc5328494b22f5b9c088d552a32660fd1f88a82ff15c62351a7f2fb989d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af3cc5328494b22f5b9c088d552a32660fd1f88a82ff15c62351a7f2fb989d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.247499 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb00a0c3-88a0-40d6-b122-5a3b9724d88a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21c918478f060da9a7ee682433237bc1cb0c6b110791a0993899bd89c6d02a69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b449f12e2096bf09c6f657a8976571d78e372dae478f11559dc76f5a023807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5407e663761dfcb397bd78cf22a7bf00ee09756c85c06e58cfa06eb394ebf8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfd5a5944660aa13406a9a6cb976b5ab05eeecf647ea196ea60fd5c475f7c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://319546e4bce2d37c4cd15d42f08971e136328458c5af93011780d489368cf9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5771666a1a171802933cf18cfebef6904b328e0ca59f54bd183b0586bfb1498e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d18643f41d61f04351a9f670ade751a656316899662e1b706294c76a6f3f23b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06679c50206eaa9a8ed02b5f06c9e1a61dd79dc9ab7dd5cda9bc2d7214e0e584\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.267122 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab225a894414e0903baf1ef6a2e29c2cf39f01d68ed99e674b9a4fcea1db9a6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.275579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.275636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.275653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.275681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.275700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.288387 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b612750c209b310fe14a6a2dd70c6cbf6d1a44feb73de70eadcf93e169ad53c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.305245 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jlpj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c455fa6-c3c0-4d87-871a-e038553054c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e38db013ec46759f7011d2d9efb9d6d7e0dd6da38a7c8cfd3101c0f3e2404386\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwbg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:13Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jlpj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.334174 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2b454df8-f3d9-4751-a678-923066b3d3e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2063dc7aed88fcb9a8adcbd922bc2987f0616cd907cd9331a92c6af60e3301f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b801604478f067b9e0ba607a478a7adc0d3421b2ff7509fadb50dca0683b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdbdc1be6132ab1cc5db1ad52e51433018091eede9dce87087dccc881a26ecf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://853bd0c42fa1910d64d6534e6d679584521e9ff0cdc2f13d79887941e68def69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a9ad9c1b359e1991d529e598b1d892ca073a77c307a82ff8df8c9682e9d688b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://05799a6d81e4a1b8dd7f0c8814973e877c33961f697187db13ee182bcfa9a39f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e217292c7002af82dd39734cf3ec8011e91b20f5ab21abe626cd1530bf494d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7s76\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5gtkk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.358974 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6849017-7980-4ba5-a79f-e4f9949309b9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T21:17:06Z\\\",\\\"message\\\":\\\"W0130 21:16:55.360535 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0130 21:16:55.360953 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769807815 cert, and key in /tmp/serving-cert-4157230187/serving-signer.crt, /tmp/serving-cert-4157230187/serving-signer.key\\\\nI0130 21:16:55.715824 1 observer_polling.go:159] Starting file observer\\\\nW0130 21:16:55.719605 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0130 21:16:55.720033 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 21:16:55.721405 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4157230187/tls.crt::/tmp/serving-cert-4157230187/tls.key\\\\\\\"\\\\nF0130 21:17:06.016029 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.378406 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79d39ab7-8689-4526-9272-89b36376f764\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:16:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2317b1017bdb0016f83ce26aca03739dca5ed39997df3b289bddcc875cf6fb18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9be765d124a0db837f5b3f8d348bf1ccb331a52969bbbead2db1e5483f78678a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac99c388a21b8b506e8ad9434f4f2f6da5ed86a7d5011333cc47e4da91c1a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:16:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06b0acaaa4dc8421fa1bf2af5e8ae5df18c9bcb5e6df3ac1c54269400d997a66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:16:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:16:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:16:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.379266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.379369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.379387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.379410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.379427 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.399473 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.418959 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.441184 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g7fgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4c2ec-791a-4f32-8ba0-118cac4e72e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:02Z\\\",\\\"message\\\":\\\"2026-01-30T21:17:16+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b\\\\n2026-01-30T21:17:16+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c3619ea8-4302-424b-83ab-58193345070b to 
/host/opt/cni/bin/\\\\n2026-01-30T21:17:16Z [verbose] multus-daemon started\\\\n2026-01-30T21:17:16Z [verbose] Readiness Indicator file check\\\\n2026-01-30T21:18:01Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:18:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztrr2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g7fgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.459725 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83a76f34-15d7-45c2-874e-d44709adbd11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4213e56044e2e95d65dff616185522d11cddfe93ee9a506b7a1d050d555b505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zq7rt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p24tc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.482394 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.482451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.482470 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.482493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.482510 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.493350 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f295c622-6366-498b-b846-24316b3ad5b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a6
3e0bd16d216c12213dec48f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T21:18:09Z\\\",\\\"message\\\":\\\"/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0130 21:18:09.271501 6773 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0130 21:18:09.271130 6773 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc\\\\nI0130 21:18:09.271526 6773 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF0130 21:18:09.271520 6773 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:09Z is after 2025-08-24T17:21:41Z]\\\\nI0130 21:18:09.271\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T21:18:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T21:17:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T21:17:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T21:17:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9mr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2p5n5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.511537 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bkv95" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19fca1ba-eb6d-479c-90ff-e55739aed640\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T21:17:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq9db\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T21:17:27Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bkv95\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:22Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.585988 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.586035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.586051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.586074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.586091 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.689372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.689410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.689421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.689438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.689450 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.791976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.792050 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.792073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.792104 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.792126 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.895855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.895925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.895947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.895973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.895992 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.999130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.999178 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.999217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.999238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:22 crc kubenswrapper[4721]: I0130 21:18:22.999252 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:22Z","lastTransitionTime":"2026-01-30T21:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.080655 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 15:48:38.152221746 +0000 UTC Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.101538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.101609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.101635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.101668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.101692 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.205172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.205233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.205256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.205287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.205341 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.308416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.308457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.308469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.308483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.308495 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.411715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.411767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.411784 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.411806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.411824 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.514446 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.514485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.514498 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.514515 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.514526 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.617645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.617715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.617732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.617756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.617775 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.720013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.720110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.720127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.720152 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.720169 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.823215 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.823266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.823283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.823330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.823348 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.926260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.926364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.926386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.926417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:23 crc kubenswrapper[4721]: I0130 21:18:23.926440 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:23Z","lastTransitionTime":"2026-01-30T21:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.029040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.029090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.029099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.029114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.029122 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.081573 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 04:40:43.769166876 +0000 UTC
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.091852 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.091939 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:24 crc kubenswrapper[4721]: E0130 21:18:24.091985 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.092019 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:24 crc kubenswrapper[4721]: E0130 21:18:24.092158 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.093679 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:24 crc kubenswrapper[4721]: E0130 21:18:24.093733 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:18:24 crc kubenswrapper[4721]: E0130 21:18:24.093896 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
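[Editor's note] The recurring "no CNI configuration file in /etc/kubernetes/cni/net.d/" error means the directory the container runtime watches holds no network configuration yet (here, because ovnkube-node keeps crashing and never writes one). A quick way to see what the runtime would see, sketched in Go under the assumption that .conf, .conflist and .json are the accepted extensions, as in libcni:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	dir := "/etc/kubernetes/cni/net.d"
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		fmt.Println("cannot read CNI conf dir:", err)
    		return
    	}
    	found := 0
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		// Extensions accepted by libcni; .conflist files can hold
    		// whole plugin chains.
    		case ".conf", ".conflist", ".json":
    			fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
    			found++
    		}
    	}
    	if found == 0 {
    		fmt.Println("no CNI configuration file in", dir, "- matches the kubelet error above")
    	}
    }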
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.131060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.131116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.131131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.131152 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.131167 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.232919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.232967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.232978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.232996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.233008 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.335625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.335667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.335676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.335689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.335698 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.438348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.438390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.438401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.438414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.438423 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.540803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.540861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.540879 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.540903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.540920 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.643722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.643794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.643822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.643848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.643869 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.746714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.746787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.746812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.746841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.746865 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.850062 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.850126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.850148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.850176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.850197 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.953133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.953200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.953219 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.953243 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:24 crc kubenswrapper[4721]: I0130 21:18:24.953263 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:24Z","lastTransitionTime":"2026-01-30T21:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.056274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.056624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.056633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.056645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.056654 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.081963 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 15:53:05.122834178 +0000 UTC
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.092617 4721 scope.go:117] "RemoveContainer" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4"
Jan 30 21:18:25 crc kubenswrapper[4721]: E0130 21:18:25.092873 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.160247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.160338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.160356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.160379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.160395 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.263540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.263608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.263631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.263664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.263688 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
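[Editor's note] The "back-off 40s restarting failed container" entry above is the kubelet's crash-loop backoff: restarts of a failing container are delayed by an exponentially growing interval, commonly described as starting at 10s, doubling per failure, and capping at 5m. A sketch of that schedule; the 10s/2x/5m constants are the commonly documented defaults, assumed here rather than read from this cluster's config:

    package main

    import (
    	"fmt"
    	"time"
    )

    // crashLoopDelay returns the restart delay after n consecutive failures,
    // assuming the commonly documented kubelet defaults: 10s initial delay,
    // doubling each failure, capped at 5 minutes.
    func crashLoopDelay(n int) time.Duration {
    	d := 10 * time.Second
    	for i := 1; i < n; i++ {
    		d *= 2
    		if d > 5*time.Minute {
    			return 5 * time.Minute
    		}
    	}
    	return d
    }

    func main() {
    	for n := 1; n <= 7; n++ {
    		fmt.Printf("failure %d -> back-off %s\n", n, crashLoopDelay(n))
    	}
    	// Under these assumptions, failure 3 -> back-off 40s, matching the
    	// ovnkube-controller entry above.
    }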
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.366691 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.366764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.366785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.366816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.366837 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.469106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.469155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.469171 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.469191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.469204 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.571837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.571938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.571952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.571970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.571983 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.675099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.675176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.675196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.675229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.675253 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.777996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.778053 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.778073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.778100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.778120 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.880821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.880888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.880909 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.880934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.880952 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.983611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.983676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.983699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.983724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:25 crc kubenswrapper[4721]: I0130 21:18:25.983743 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:25Z","lastTransitionTime":"2026-01-30T21:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.087280 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 20:24:32.494213057 +0000 UTC
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.091619 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.091687 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.091742 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.091848 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:26 crc kubenswrapper[4721]: E0130 21:18:26.091907 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:18:26 crc kubenswrapper[4721]: E0130 21:18:26.092124 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:26 crc kubenswrapper[4721]: E0130 21:18:26.092245 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.092587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:26 crc kubenswrapper[4721]: E0130 21:18:26.092632 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.092671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.092687 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.092707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.092719 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.195664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.195701 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.195713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.195731 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.195750 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.298387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.298460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.298475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.298493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.298505 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.400981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.401040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.401057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.401082 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.401099 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.503705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.503747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.503757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.503771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.503781 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.606522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.606578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.606590 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.606610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.606623 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.709386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.709454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.709472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.709495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.709513 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.812140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.812189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.812203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.812220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.812232 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.914941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.914995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.915007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.915026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:26 crc kubenswrapper[4721]: I0130 21:18:26.915040 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:26Z","lastTransitionTime":"2026-01-30T21:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.018440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.018490 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.018501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.018519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.018532 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.088090 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 23:16:49.724904997 +0000 UTC
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.121560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.121652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.121686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.121718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.121741 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.224489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.224558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.224577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.224603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.224621 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
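[Editor's note] The node-status blocks in this capture repeat every ~100ms, which is consistent with the kubelet's fast status-update path at startup: it polls the node on a short fixed interval until the Ready condition flips to true, instead of waiting for the normal status sync period. A generic sketch of that poll-until-ready shape; the 100ms interval and the readiness predicate are illustrative assumptions based on the log cadence, not the kubelet's actual code:

    package main

    import (
    	"context"
    	"fmt"
    	"time"
    )

    // waitUntilReady polls ready() on a short fixed interval until it reports
    // true or ctx expires -- the same shape as the kubelet's fast status
    // updates at startup (100ms assumed from the cadence seen above).
    func waitUntilReady(ctx context.Context, ready func() bool) error {
    	tick := time.NewTicker(100 * time.Millisecond)
    	defer tick.Stop()
    	for {
    		select {
    		case <-ctx.Done():
    			return ctx.Err()
    		case <-tick.C:
    			if ready() {
    				return nil
    			}
    		}
    	}
    }

    func main() {
    	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    	defer cancel()
    	start := time.Now()
    	// Stand-in predicate: "ready" after 350ms, i.e. a few polls.
    	err := waitUntilReady(ctx, func() bool { return time.Since(start) > 350*time.Millisecond })
    	fmt.Println("ready after", time.Since(start).Round(10*time.Millisecond), "err:", err)
    }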
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.327270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.327338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.327350 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.327368 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.327377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.429252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.429278 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.429285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.429314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.429323 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.531444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.531469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.531477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.531488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.531496 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.634239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.634315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.634326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.634356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.634385 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.738360 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.738429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.738456 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.738485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.738505 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.842271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.842357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.842375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.842403 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.842421 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.945809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.945876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.945903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.945937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:27 crc kubenswrapper[4721]: I0130 21:18:27.945961 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:27Z","lastTransitionTime":"2026-01-30T21:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.048327 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.048372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.048389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.048406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.048416 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.088865 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 18:19:45.705082462 +0000 UTC Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.092340 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.092433 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.092345 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.092443 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:28 crc kubenswrapper[4721]: E0130 21:18:28.092823 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:28 crc kubenswrapper[4721]: E0130 21:18:28.093206 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:28 crc kubenswrapper[4721]: E0130 21:18:28.093631 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:28 crc kubenswrapper[4721]: E0130 21:18:28.093084 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.150787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.150825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.150836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.150852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.150863 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.253581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.253621 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.253660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.253678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.253688 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.356220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.356250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.356261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.356276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.356286 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.458288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.458337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.458348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.458364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.458375 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.561572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.561642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.561674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.561703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.561722 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.664806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.664947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.664984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.665014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.665036 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.767551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.767626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.767647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.767680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.767701 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.870530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.870578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.870591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.870607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.870621 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.973283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.973346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.973357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.973377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:28 crc kubenswrapper[4721]: I0130 21:18:28.973389 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:28Z","lastTransitionTime":"2026-01-30T21:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.076081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.076136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.076153 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.076172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.076184 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.089193 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 23:41:19.429435593 +0000 UTC Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.178735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.178773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.178782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.178796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.178805 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.281607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.281691 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.281710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.281735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.281752 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.385324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.385373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.385388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.385411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.385422 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.488024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.488108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.488133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.488157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.488175 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.590846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.590875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.590883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.590896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.590904 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.693232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.693273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.693284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.693318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.693327 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.795805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.795842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.795850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.795862 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.795870 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.898578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.898642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.898661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.898683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:29 crc kubenswrapper[4721]: I0130 21:18:29.898700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:29Z","lastTransitionTime":"2026-01-30T21:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.000853 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.000908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.000918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.000933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.000942 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.089977 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 02:38:37.411164136 +0000 UTC Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.091388 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.091532 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.091585 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.091675 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.091755 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.092028 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.092157 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.092388 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.103950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.104026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.104049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.104076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.104097 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.207539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.207673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.207692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.207722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.207745 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.254615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.254706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.254726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.254754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.254773 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.274815 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.280704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.280769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.280795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.280854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.280878 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.297123 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.301547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.301617 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.301638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.301666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.301686 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.320944 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.325865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.325925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.325944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.325968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.325988 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.344627 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.350128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.350203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.350227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.350256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.350276 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.367418 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T21:18:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d55cd7e2-7ca0-4ee4-9f64-b636d350d409\\\",\\\"systemUUID\\\":\\\"dce14e0b-51ff-48a1-84fb-60746c76c1b3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T21:18:30Z is after 2025-08-24T17:21:41Z" Jan 30 21:18:30 crc kubenswrapper[4721]: E0130 21:18:30.367672 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.369149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.369186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.369196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.369211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.369221 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.472542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.472600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.472623 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.472649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.472666 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.575470 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.575550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.575571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.575601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.575623 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.678631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.678735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.678762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.678790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.678810 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.781570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.781612 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.781630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.781653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.781671 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.885225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.885272 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.885287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.885356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.885375 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.987876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.987945 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.987967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.987995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:30 crc kubenswrapper[4721]: I0130 21:18:30.988016 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:30Z","lastTransitionTime":"2026-01-30T21:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.091579 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 13:31:50.271343501 +0000 UTC Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.097707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.097769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.097849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.097945 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.097965 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:31Z","lastTransitionTime":"2026-01-30T21:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.201808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.201852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.201860 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.201874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 21:18:31 crc kubenswrapper[4721]: I0130 21:18:31.201883 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:31Z","lastTransitionTime":"2026-01-30T21:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the same five node-status entries (the four "Recording event message for node" events plus the setters.go "Node became not ready" condition) repeat with only timestamps advancing, at 21:18:31.304, .407, .510, .613, .716, .820 and .923 ...]
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.018613 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:32 crc kubenswrapper[4721]: E0130 21:18:32.018839 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 21:18:32 crc kubenswrapper[4721]: E0130 21:18:32.018982 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs podName:19fca1ba-eb6d-479c-90ff-e55739aed640 nodeName:}" failed. No retries permitted until 2026-01-30 21:19:36.018946428 +0000 UTC m=+164.810847704 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs") pod "network-metrics-daemon-bkv95" (UID: "19fca1ba-eb6d-479c-90ff-e55739aed640") : object "openshift-multus"/"metrics-daemon-secret" not registered
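The durationBeforeRetry of 1m4s in the mount failure above comes from exponential backoff: each failed MountVolume attempt doubles the wait before the next retry, up to a cap. A minimal Go sketch of that doubling pattern; the 500ms starting delay and ~2m cap are assumptions chosen to reproduce the logged 1m4s, not constants confirmed from kubelet source:

    package main

    import (
    	"fmt"
    	"time"
    )

    // retryBackoff doubles the delay after every failure, capped at maxDelay.
    // Seven consecutive failures starting from 500ms yield 1m4s, matching
    // the durationBeforeRetry in the nestedpendingoperations entry above.
    func retryBackoff(initial, maxDelay time.Duration, failures int) time.Duration {
    	d := initial
    	for i := 0; i < failures; i++ {
    		d *= 2
    		if d > maxDelay {
    			return maxDelay
    		}
    	}
    	return d
    }

    func main() {
    	for f := 0; f <= 7; f++ {
    		fmt.Printf("failures=%d wait=%s\n", f, retryBackoff(500*time.Millisecond, 2*time.Minute+2*time.Second, f))
    	}
    	// failures=7 wait=1m4s
    }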
[... the five node-status entries repeat at 21:18:32.026 ...]
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.091679 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.091806 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.091865 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.091887 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.091894 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 17:08:54.806202304 +0000 UTC
Jan 30 21:18:32 crc kubenswrapper[4721]: E0130 21:18:32.092965 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:18:32 crc kubenswrapper[4721]: E0130 21:18:32.093250 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640"
Jan 30 21:18:32 crc kubenswrapper[4721]: E0130 21:18:32.093438 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:18:32 crc kubenswrapper[4721]: E0130 21:18:32.094797 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... the five node-status entries repeat at 21:18:32.136 ...]
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.163968 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-g7fgc" podStartSLOduration=78.163946577 podStartE2EDuration="1m18.163946577s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.163852284 +0000 UTC m=+100.955753540" watchObservedRunningTime="2026-01-30 21:18:32.163946577 +0000 UTC m=+100.955847823"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.187453 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podStartSLOduration=78.187436039 podStartE2EDuration="1m18.187436039s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.18685756 +0000 UTC m=+100.978758856" watchObservedRunningTime="2026-01-30 21:18:32.187436039 +0000 UTC m=+100.979337295"
[... the five node-status entries repeat at 21:18:32.239 ...]
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.250388 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=80.250370751 podStartE2EDuration="1m20.250370751s" podCreationTimestamp="2026-01-30 21:17:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.249856016 +0000 UTC m=+101.041757262" watchObservedRunningTime="2026-01-30 21:18:32.250370751 +0000 UTC m=+101.042272007"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.264536 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=44.264521644 podStartE2EDuration="44.264521644s" podCreationTimestamp="2026-01-30 21:17:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.264321878 +0000 UTC m=+101.056223124" watchObservedRunningTime="2026-01-30 21:18:32.264521644 +0000 UTC m=+101.056422890"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.301227 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=77.301205118 podStartE2EDuration="1m17.301205118s" podCreationTimestamp="2026-01-30 21:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.300712812 +0000 UTC m=+101.092614068" watchObservedRunningTime="2026-01-30 21:18:32.301205118 +0000 UTC m=+101.093106384"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.301649 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sr2j9" podStartSLOduration=78.301639422 podStartE2EDuration="1m18.301639422s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.284719271 +0000 UTC m=+101.076620527" watchObservedRunningTime="2026-01-30 21:18:32.301639422 +0000 UTC m=+101.093540678"
[... the five node-status entries repeat at 21:18:32.341 ...]
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.347463 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-pmc6z" podStartSLOduration=78.347444678 podStartE2EDuration="1m18.347444678s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.346674983 +0000 UTC m=+101.138576239" watchObservedRunningTime="2026-01-30 21:18:32.347444678 +0000 UTC m=+101.139345924"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.395070 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-jlpj8" podStartSLOduration=79.395047351 podStartE2EDuration="1m19.395047351s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.39442531 +0000 UTC m=+101.186326566" watchObservedRunningTime="2026-01-30 21:18:32.395047351 +0000 UTC m=+101.186948607"
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.420629 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-5gtkk" podStartSLOduration=78.420611909 podStartE2EDuration="1m18.420611909s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.417883071 +0000 UTC m=+101.209784327" watchObservedRunningTime="2026-01-30 21:18:32.420611909 +0000 UTC m=+101.212513155"
[... the five node-status entries repeat at 21:18:32.443 ...]
Jan 30 21:18:32 crc kubenswrapper[4721]: I0130 21:18:32.463039 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=14.463015744 podStartE2EDuration="14.463015744s" podCreationTimestamp="2026-01-30 21:18:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.431005401 +0000 UTC m=+101.222906677" watchObservedRunningTime="2026-01-30 21:18:32.463015744 +0000 UTC m=+101.254917010"
[... the five node-status entries repeat at 21:18:32.546 and .649 ...]
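The pod_startup_latency_tracker entries above are straightforward timestamp arithmetic: podStartE2EDuration is the watch-observed running time minus podCreationTimestamp (that the watch time, not observedRunningTime, is the minuend is inferred from the numbers, not from kubelet source). Reproducing the multus-g7fgc figures:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Timestamps copied from the multus-g7fgc entry above.
    	created, _ := time.Parse(time.RFC3339, "2026-01-30T21:17:14Z")
    	running, _ := time.Parse(time.RFC3339Nano, "2026-01-30T21:18:32.163946577Z")
    	e2e := running.Sub(created)
    	fmt.Printf("podStartE2EDuration=%q podStartSLOduration=%.9f\n", e2e, e2e.Seconds())
    	// podStartE2EDuration="1m18.163946577s" podStartSLOduration=78.163946577
    }

Note that firstStartedPulling and lastFinishedPulling are the zero time (0001-01-01) for every pod here, so no image-pull interval is subtracted and the SLO duration equals the end-to-end duration.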
[... the five node-status entries repeat at 21:18:32.752, .856 and .960, and at 21:18:33.065 ...]
Jan 30 21:18:33 crc kubenswrapper[4721]: I0130 21:18:33.095533 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 21:48:16.566115163 +0000 UTC
[... the five node-status entries repeat at 21:18:33.168, .271, .375, .479, .583, .686, .790, .894 and .998 ...]
Jan 30 21:18:34 crc kubenswrapper[4721]: I0130 21:18:34.091620 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:34 crc kubenswrapper[4721]: I0130 21:18:34.091673 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:34 crc kubenswrapper[4721]: I0130 21:18:34.091641 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:34 crc kubenswrapper[4721]: I0130 21:18:34.091724 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:34 crc kubenswrapper[4721]: E0130 21:18:34.091911 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 21:18:34 crc kubenswrapper[4721]: E0130 21:18:34.092257 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 21:18:34 crc kubenswrapper[4721]: E0130 21:18:34.092693 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 21:18:34 crc kubenswrapper[4721]: E0130 21:18:34.092785 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640"
Jan 30 21:18:34 crc kubenswrapper[4721]: I0130 21:18:34.096251 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 19:30:39.068310808 +0000 UTC
[... the five node-status entries repeat at 21:18:34.101 ...]
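Every NotReady heartbeat and every "Error syncing pod" in this window traces back to a single condition: the container runtime finds no CNI configuration file in /etc/kubernetes/cni/net.d/, so it reports NetworkReady=false and the kubelet refuses to sync pods that need pod networking. A simplified Go sketch of that directory check; the real logic lives in the runtime's CNI plugin manager (CRI-O/ocicni here), and the accepted file extensions are an assumption:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    	"strings"
    )

    // findCNIConfig looks for a CNI network config in dir, the way a runtime
    // decides whether the pod network is ready. While it returns an error,
    // the runtime keeps reporting NetworkReady=false, producing exactly the
    // log pattern above.
    func findCNIConfig(dir string) (string, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return "", err
    	}
    	for _, e := range entries {
    		switch strings.ToLower(filepath.Ext(e.Name())) {
    		case ".conf", ".conflist", ".json":
    			return filepath.Join(dir, e.Name()), nil
    		}
    	}
    	return "", fmt.Errorf("no CNI configuration file in %s", dir)
    }

    func main() {
    	if _, err := findCNIConfig("/etc/kubernetes/cni/net.d"); err != nil {
    		fmt.Println("network not ready:", err)
    	}
    }

The four pods hit by the sync errors (network-check-source, network-check-target, networking-console-plugin, network-metrics-daemon) are precisely the ones that need a pod sandbox with CNI networking; host-network pods such as kube-apiserver-crc started fine, as the startup-latency entries earlier show.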
[... the five node-status entries repeat at 21:18:34.204, .308, .411, .515, .619, .722, .827 and .931, and at 21:18:35.035 ...]
Jan 30 21:18:35 crc kubenswrapper[4721]: I0130 21:18:35.096458 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 08:49:32.300372237 +0000 UTC
[... the five node-status entries repeat at 21:18:35.139 and .243 ...]
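The condition payload those heartbeats keep re-asserting has exactly the shape serialized in the setters.go entries. A self-contained sketch with a local struct standing in for v1.NodeCondition from k8s.io/api (which is what the kubelet actually uses); field order in the struct matches the JSON order seen in the log:

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"time"
    )

    // NodeCondition mirrors the fields of the condition={...} objects above.
    type NodeCondition struct {
    	Type               string `json:"type"`
    	Status             string `json:"status"`
    	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
    	LastTransitionTime string `json:"lastTransitionTime"`
    	Reason             string `json:"reason"`
    	Message            string `json:"message"`
    }

    func main() {
    	now := time.Now().UTC().Format(time.RFC3339)
    	c := NodeCondition{
    		Type:               "Ready",
    		Status:             "False",
    		LastHeartbeatTime:  now,
    		LastTransitionTime: now,
    		Reason:             "KubeletNotReady",
    		Message:            "container runtime network not ready: NetworkReady=false ...",
    	}
    	b, _ := json.Marshal(c)
    	fmt.Println(`"Node became not ready" node="crc" condition=` + string(b))
    }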
[... the five node-status entries repeat at 21:18:35.347, .451, .554, .656, .760, .862 and .965, and at 21:18:36.068 ...]
Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.091228 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.091241 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.091374 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.091380 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95"
Jan 30 21:18:36 crc kubenswrapper[4721]: E0130 21:18:36.091549 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:36 crc kubenswrapper[4721]: E0130 21:18:36.091751 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:36 crc kubenswrapper[4721]: E0130 21:18:36.091835 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:36 crc kubenswrapper[4721]: E0130 21:18:36.091945 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.096610 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 12:07:58.527230353 +0000 UTC Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.171190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.171250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.171263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.171285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.171328 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.273942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.274011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.274030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.274056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.274077 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.376801 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.376877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.376903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.376936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.376963 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.479802 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.479837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.479849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.479865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.479876 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.582760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.582825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.582846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.582876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.582898 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.686216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.686328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.686349 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.686382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.686446 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.789664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.789714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.789723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.789759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.789772 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.893853 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.893919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.893939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.893965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.893987 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.998172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.998228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.998239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.998262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:36 crc kubenswrapper[4721]: I0130 21:18:36.998276 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:36Z","lastTransitionTime":"2026-01-30T21:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.097283 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 06:34:32.279542017 +0000 UTC Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.102736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.102817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.102837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.102866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.102887 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.206130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.206208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.206228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.206254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.206282 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.309227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.309326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.309346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.309375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.309405 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.413163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.413241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.413256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.413281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.413324 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.516598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.516668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.516684 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.516707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.516735 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.620346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.620414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.620433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.620460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.620478 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.723037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.723135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.723160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.723194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.723216 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.826897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.826989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.827010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.827047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.827066 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.931361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.931449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.931468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.931502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:37 crc kubenswrapper[4721]: I0130 21:18:37.931524 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:37Z","lastTransitionTime":"2026-01-30T21:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.035584 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.035656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.035675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.035707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.035727 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.091731 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.091828 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.091831 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.091890 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:38 crc kubenswrapper[4721]: E0130 21:18:38.092748 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:38 crc kubenswrapper[4721]: E0130 21:18:38.092789 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:38 crc kubenswrapper[4721]: E0130 21:18:38.092848 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:38 crc kubenswrapper[4721]: E0130 21:18:38.092914 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.097518 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 16:12:28.550528048 +0000 UTC Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.139565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.139632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.139652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.139681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.139699 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.243467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.243831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.243899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.243976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.244047 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.347632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.347699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.347720 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.347749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.347770 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.450844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.451181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.451350 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.451540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.451689 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.556085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.556175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.556193 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.556224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.556250 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.659935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.660003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.660024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.660052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.660075 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.763159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.763219 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.763236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.763261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.763279 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.866963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.867476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.867584 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.867710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.867813 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.971894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.972589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.972645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.972676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:38 crc kubenswrapper[4721]: I0130 21:18:38.972698 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:38Z","lastTransitionTime":"2026-01-30T21:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.076334 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.076439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.076474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.076517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.076545 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.097913 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 00:30:25.689567202 +0000 UTC Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.179646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.179710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.179727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.179751 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.179765 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.283847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.283904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.283916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.283935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.283989 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.387288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.387421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.387449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.387487 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.387517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.490875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.490953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.490973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.490999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.491018 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.594233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.594340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.594364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.594396 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.594417 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.697902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.697977 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.697994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.698021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.698042 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.800481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.800549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.800568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.800600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.800620 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.902982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.903070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.903089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.903123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:39 crc kubenswrapper[4721]: I0130 21:18:39.903143 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:39Z","lastTransitionTime":"2026-01-30T21:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.007336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.007421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.007446 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.007474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.007495 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.091901 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.091882 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.091951 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.092007 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:40 crc kubenswrapper[4721]: E0130 21:18:40.092088 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:40 crc kubenswrapper[4721]: E0130 21:18:40.092465 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:40 crc kubenswrapper[4721]: E0130 21:18:40.093171 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:40 crc kubenswrapper[4721]: E0130 21:18:40.093357 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.093868 4721 scope.go:117] "RemoveContainer" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4" Jan 30 21:18:40 crc kubenswrapper[4721]: E0130 21:18:40.094219 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2p5n5_openshift-ovn-kubernetes(f295c622-6366-498b-b846-24316b3ad5b7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.098686 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 04:05:40.708664616 +0000 UTC Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.110277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.110366 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.110386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.110414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.110434 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.214047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.214111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.214130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.214159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.214180 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.317091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.317146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.317165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.317187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.317204 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.420067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.420144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.420164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.420194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.420213 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.523232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.523291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.523316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.523337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.523350 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.557986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.558066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.558091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.558126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.558150 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T21:18:40Z","lastTransitionTime":"2026-01-30T21:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.617379 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=85.617358177 podStartE2EDuration="1m25.617358177s" podCreationTimestamp="2026-01-30 21:17:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:32.464966407 +0000 UTC m=+101.256867673" watchObservedRunningTime="2026-01-30 21:18:40.617358177 +0000 UTC m=+109.409259423" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.619194 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc"] Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.620039 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.623224 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.623253 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.623721 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.631237 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.725389 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f0ad084f-a31b-4af4-a93c-af51409b7cd2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.725514 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/f0ad084f-a31b-4af4-a93c-af51409b7cd2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.725584 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0ad084f-a31b-4af4-a93c-af51409b7cd2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.725657 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/f0ad084f-a31b-4af4-a93c-af51409b7cd2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.725695 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0ad084f-a31b-4af4-a93c-af51409b7cd2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.827094 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f0ad084f-a31b-4af4-a93c-af51409b7cd2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc 
kubenswrapper[4721]: I0130 21:18:40.827196 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/f0ad084f-a31b-4af4-a93c-af51409b7cd2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.827277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0ad084f-a31b-4af4-a93c-af51409b7cd2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.827376 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/f0ad084f-a31b-4af4-a93c-af51409b7cd2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.827418 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0ad084f-a31b-4af4-a93c-af51409b7cd2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.827411 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/f0ad084f-a31b-4af4-a93c-af51409b7cd2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.827657 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/f0ad084f-a31b-4af4-a93c-af51409b7cd2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.828806 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f0ad084f-a31b-4af4-a93c-af51409b7cd2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.838651 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0ad084f-a31b-4af4-a93c-af51409b7cd2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.859145 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0ad084f-a31b-4af4-a93c-af51409b7cd2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-45hwc\" (UID: \"f0ad084f-a31b-4af4-a93c-af51409b7cd2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:40 crc kubenswrapper[4721]: I0130 21:18:40.947393 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" Jan 30 21:18:41 crc kubenswrapper[4721]: I0130 21:18:41.099574 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 01:44:02.445914621 +0000 UTC Jan 30 21:18:41 crc kubenswrapper[4721]: I0130 21:18:41.100103 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 30 21:18:41 crc kubenswrapper[4721]: I0130 21:18:41.110806 4721 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 30 21:18:41 crc kubenswrapper[4721]: I0130 21:18:41.743098 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" event={"ID":"f0ad084f-a31b-4af4-a93c-af51409b7cd2","Type":"ContainerStarted","Data":"8b205413c700043048c09825a57c914582f4b81d10717417d89130919a30319f"} Jan 30 21:18:41 crc kubenswrapper[4721]: I0130 21:18:41.743164 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" event={"ID":"f0ad084f-a31b-4af4-a93c-af51409b7cd2","Type":"ContainerStarted","Data":"a7ea7f2f461ae13e1652dd780d5a1458d8aeba8e5de6a97876a6090ba51a4d0a"} Jan 30 21:18:41 crc kubenswrapper[4721]: I0130 21:18:41.759954 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hwc" podStartSLOduration=88.75993719 podStartE2EDuration="1m28.75993719s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:41.759126854 +0000 UTC m=+110.551028100" watchObservedRunningTime="2026-01-30 21:18:41.75993719 +0000 UTC m=+110.551838436" Jan 30 21:18:42 crc kubenswrapper[4721]: I0130 21:18:42.091835 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:42 crc kubenswrapper[4721]: I0130 21:18:42.091866 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:42 crc kubenswrapper[4721]: E0130 21:18:42.092809 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:42 crc kubenswrapper[4721]: I0130 21:18:42.092852 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:42 crc kubenswrapper[4721]: I0130 21:18:42.092821 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:42 crc kubenswrapper[4721]: E0130 21:18:42.092896 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:42 crc kubenswrapper[4721]: E0130 21:18:42.093059 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:42 crc kubenswrapper[4721]: E0130 21:18:42.093150 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:44 crc kubenswrapper[4721]: I0130 21:18:44.092599 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:44 crc kubenswrapper[4721]: I0130 21:18:44.092733 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:44 crc kubenswrapper[4721]: E0130 21:18:44.092770 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:44 crc kubenswrapper[4721]: I0130 21:18:44.092848 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:44 crc kubenswrapper[4721]: I0130 21:18:44.092854 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:44 crc kubenswrapper[4721]: E0130 21:18:44.093034 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:44 crc kubenswrapper[4721]: E0130 21:18:44.093239 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:44 crc kubenswrapper[4721]: E0130 21:18:44.093486 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:46 crc kubenswrapper[4721]: I0130 21:18:46.091402 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:46 crc kubenswrapper[4721]: I0130 21:18:46.091453 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:46 crc kubenswrapper[4721]: I0130 21:18:46.091535 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:46 crc kubenswrapper[4721]: I0130 21:18:46.091674 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:46 crc kubenswrapper[4721]: E0130 21:18:46.091664 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:46 crc kubenswrapper[4721]: E0130 21:18:46.091814 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:46 crc kubenswrapper[4721]: E0130 21:18:46.091916 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:46 crc kubenswrapper[4721]: E0130 21:18:46.092103 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.092187 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.092329 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.092339 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:48 crc kubenswrapper[4721]: E0130 21:18:48.092465 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.092502 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:48 crc kubenswrapper[4721]: E0130 21:18:48.092650 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:48 crc kubenswrapper[4721]: E0130 21:18:48.092869 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:48 crc kubenswrapper[4721]: E0130 21:18:48.092961 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.773802 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/1.log" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.774811 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/0.log" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.774913 4721 generic.go:334] "Generic (PLEG): container finished" podID="62d4c2ec-791a-4f32-8ba0-118cac4e72e5" containerID="0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04" exitCode=1 Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.774985 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerDied","Data":"0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04"} Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.775077 4721 scope.go:117] "RemoveContainer" containerID="99498f620202bfd4f76576e68fc61af41d62fe11ab5d0a32820c03df384b7055" Jan 30 21:18:48 crc kubenswrapper[4721]: I0130 21:18:48.776661 4721 scope.go:117] "RemoveContainer" containerID="0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04" Jan 30 21:18:48 crc kubenswrapper[4721]: E0130 21:18:48.777221 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-g7fgc_openshift-multus(62d4c2ec-791a-4f32-8ba0-118cac4e72e5)\"" pod="openshift-multus/multus-g7fgc" podUID="62d4c2ec-791a-4f32-8ba0-118cac4e72e5" Jan 30 21:18:49 crc kubenswrapper[4721]: I0130 21:18:49.782790 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/1.log" Jan 30 21:18:50 crc kubenswrapper[4721]: I0130 21:18:50.091881 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:50 crc kubenswrapper[4721]: I0130 21:18:50.091983 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:50 crc kubenswrapper[4721]: E0130 21:18:50.092095 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:50 crc kubenswrapper[4721]: I0130 21:18:50.092125 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:50 crc kubenswrapper[4721]: E0130 21:18:50.092184 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:50 crc kubenswrapper[4721]: I0130 21:18:50.091883 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:50 crc kubenswrapper[4721]: E0130 21:18:50.092368 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:50 crc kubenswrapper[4721]: E0130 21:18:50.092568 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:52 crc kubenswrapper[4721]: I0130 21:18:52.092036 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:52 crc kubenswrapper[4721]: I0130 21:18:52.092102 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:52 crc kubenswrapper[4721]: E0130 21:18:52.095676 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:52 crc kubenswrapper[4721]: I0130 21:18:52.096012 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:52 crc kubenswrapper[4721]: I0130 21:18:52.096266 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:52 crc kubenswrapper[4721]: E0130 21:18:52.096609 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:52 crc kubenswrapper[4721]: E0130 21:18:52.097442 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:52 crc kubenswrapper[4721]: E0130 21:18:52.098095 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:52 crc kubenswrapper[4721]: E0130 21:18:52.106716 4721 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 30 21:18:52 crc kubenswrapper[4721]: E0130 21:18:52.184952 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 21:18:54 crc kubenswrapper[4721]: I0130 21:18:54.091899 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:54 crc kubenswrapper[4721]: I0130 21:18:54.092020 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:54 crc kubenswrapper[4721]: E0130 21:18:54.092098 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:54 crc kubenswrapper[4721]: I0130 21:18:54.092151 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:54 crc kubenswrapper[4721]: E0130 21:18:54.092283 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:54 crc kubenswrapper[4721]: I0130 21:18:54.092384 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:54 crc kubenswrapper[4721]: E0130 21:18:54.092425 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:54 crc kubenswrapper[4721]: E0130 21:18:54.092570 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:55 crc kubenswrapper[4721]: I0130 21:18:55.093876 4721 scope.go:117] "RemoveContainer" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4" Jan 30 21:18:55 crc kubenswrapper[4721]: I0130 21:18:55.806451 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/3.log" Jan 30 21:18:55 crc kubenswrapper[4721]: I0130 21:18:55.808862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerStarted","Data":"2d6a1248435f7b74f1093fccf453e5e79a2b66fb79882ee43d359ff897c280e2"} Jan 30 21:18:55 crc kubenswrapper[4721]: I0130 21:18:55.809281 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:18:55 crc kubenswrapper[4721]: I0130 21:18:55.841676 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podStartSLOduration=101.84166207 podStartE2EDuration="1m41.84166207s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:18:55.840706359 +0000 UTC m=+124.632607605" watchObservedRunningTime="2026-01-30 21:18:55.84166207 +0000 UTC m=+124.633563316" Jan 30 21:18:56 crc kubenswrapper[4721]: I0130 21:18:56.091699 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:56 crc kubenswrapper[4721]: I0130 21:18:56.091778 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:56 crc kubenswrapper[4721]: E0130 21:18:56.091849 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:56 crc kubenswrapper[4721]: I0130 21:18:56.091920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:56 crc kubenswrapper[4721]: E0130 21:18:56.092084 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:56 crc kubenswrapper[4721]: I0130 21:18:56.092105 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:56 crc kubenswrapper[4721]: E0130 21:18:56.092155 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:56 crc kubenswrapper[4721]: E0130 21:18:56.092213 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:56 crc kubenswrapper[4721]: I0130 21:18:56.192927 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-bkv95"] Jan 30 21:18:56 crc kubenswrapper[4721]: I0130 21:18:56.812065 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:56 crc kubenswrapper[4721]: E0130 21:18:56.812825 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:57 crc kubenswrapper[4721]: E0130 21:18:57.186774 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 21:18:58 crc kubenswrapper[4721]: I0130 21:18:58.092161 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:18:58 crc kubenswrapper[4721]: I0130 21:18:58.092195 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:18:58 crc kubenswrapper[4721]: I0130 21:18:58.092161 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:18:58 crc kubenswrapper[4721]: E0130 21:18:58.092290 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:18:58 crc kubenswrapper[4721]: I0130 21:18:58.092486 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:18:58 crc kubenswrapper[4721]: E0130 21:18:58.092498 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:18:58 crc kubenswrapper[4721]: E0130 21:18:58.092537 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:18:58 crc kubenswrapper[4721]: E0130 21:18:58.092591 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:18:59 crc kubenswrapper[4721]: I0130 21:18:59.092715 4721 scope.go:117] "RemoveContainer" containerID="0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04" Jan 30 21:18:59 crc kubenswrapper[4721]: I0130 21:18:59.822087 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/1.log" Jan 30 21:18:59 crc kubenswrapper[4721]: I0130 21:18:59.822153 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerStarted","Data":"b7bb92494e4fef088d7d6741d3a0314fed401e904d1675f21988157c35a6a12c"} Jan 30 21:19:00 crc kubenswrapper[4721]: I0130 21:19:00.091967 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:19:00 crc kubenswrapper[4721]: E0130 21:19:00.092099 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:19:00 crc kubenswrapper[4721]: I0130 21:19:00.091970 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:19:00 crc kubenswrapper[4721]: I0130 21:19:00.092142 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:19:00 crc kubenswrapper[4721]: E0130 21:19:00.092203 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:19:00 crc kubenswrapper[4721]: I0130 21:19:00.092251 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:19:00 crc kubenswrapper[4721]: E0130 21:19:00.092370 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:19:00 crc kubenswrapper[4721]: E0130 21:19:00.092528 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:19:02 crc kubenswrapper[4721]: I0130 21:19:02.091214 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:19:02 crc kubenswrapper[4721]: I0130 21:19:02.091255 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:19:02 crc kubenswrapper[4721]: I0130 21:19:02.092279 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:19:02 crc kubenswrapper[4721]: E0130 21:19:02.092270 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bkv95" podUID="19fca1ba-eb6d-479c-90ff-e55739aed640" Jan 30 21:19:02 crc kubenswrapper[4721]: I0130 21:19:02.092324 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:19:02 crc kubenswrapper[4721]: E0130 21:19:02.092498 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 21:19:02 crc kubenswrapper[4721]: E0130 21:19:02.092578 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 21:19:02 crc kubenswrapper[4721]: E0130 21:19:02.092631 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.091499 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.091535 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.091579 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.092178 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.096037 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.096103 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.096292 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.096372 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.096488 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 30 21:19:04 crc kubenswrapper[4721]: I0130 21:19:04.098481 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 30 21:19:10 crc kubenswrapper[4721]: I0130 21:19:10.920579 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.087718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.132933 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.133603 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-w99lk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.133953 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2b8tc"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.134111 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.134361 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.134770 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.135627 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-nddl4"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.136328 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.137559 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.138171 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.139111 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ktnwk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.139641 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.140071 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gs7nh"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.143678 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rhwvl"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.144066 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.144428 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.144766 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.145160 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.145567 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.145633 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-xvtnr"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.146324 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.146716 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.146848 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.146973 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.148057 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.148241 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.148508 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.148618 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.148811 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.149074 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.149119 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.149450 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.149639 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.150253 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.150400 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.150755 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.150896 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.151058 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.151219 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.151424 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.151829 4721 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.151947 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.152049 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.152182 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.153387 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.154567 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.155283 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.155691 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.157159 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.157455 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.157653 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.157920 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158110 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158148 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158251 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158364 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158442 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158511 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158614 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.158955 
4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.159107 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.159889 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.159983 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-czsct"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.160118 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.160375 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.160390 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.160554 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.161369 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.161482 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.161563 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.161601 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.162477 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.162674 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.162678 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-th4nz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.163412 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.162801 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.162834 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.162906 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.165818 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.165942 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.166416 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.166792 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.168131 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.168784 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.168912 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.168990 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.169072 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.169194 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.169330 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.182657 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.182718 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.182803 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.183562 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-v7lbj"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.184163 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.184537 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.184582 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.185615 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.186477 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xltgq"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.187536 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.189901 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.196144 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.196144 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.201400 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.201566 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.201798 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.202554 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-nddl4"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203113 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203448 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203547 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203626 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203692 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203736 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 30 
21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203827 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203957 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203983 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204091 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204167 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204215 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204259 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204331 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204445 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204551 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204664 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204776 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204911 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.205021 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.205129 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.205435 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.205611 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.205724 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.205889 4721 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.204095 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.206080 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.206326 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.203832 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.206667 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.211929 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.217059 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.214264 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.223749 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gs7nh"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.223769 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ktnwk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.223780 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-th4nz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.223792 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2b8tc"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.223802 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.218153 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.220153 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.225885 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.226760 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rhwvl"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.227035 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-v7lbj"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.227981 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-console/console-f9d7485db-xvtnr"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.229846 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.230376 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-czsct"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.231746 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-w99lk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.234265 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xltgq"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.234857 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.236547 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.257360 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281041 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281541 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d5b2021-f707-45bf-9890-d0ec722cd52f-serving-cert\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281577 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-encryption-config\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281603 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/834ba560-2a16-437b-8d57-20a0017ee78f-config\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281623 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70b27d89-4c43-455f-9084-434b226754a0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281664 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-etcd-serving-ca\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281696 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-serving-cert\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281722 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0aa4d8b0-73fa-4173-903c-c397c58652ef-node-pullsecrets\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281814 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfnbf\" (UniqueName: \"kubernetes.io/projected/c93c14cf-0b1f-419e-9282-d58e950c1e42-kube-api-access-hfnbf\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281867 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-config\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281914 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9p95\" (UniqueName: \"kubernetes.io/projected/9d5b2021-f707-45bf-9890-d0ec722cd52f-kube-api-access-v9p95\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281951 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c93c14cf-0b1f-419e-9282-d58e950c1e42-trusted-ca\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.281981 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/834ba560-2a16-437b-8d57-20a0017ee78f-images\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282049 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-oauth-config\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282084 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j42n\" (UniqueName: \"kubernetes.io/projected/715d346b-ba37-4920-a27b-5f9ef61133ef-kube-api-access-2j42n\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282117 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a09a10cf-7430-4d17-bac7-f68e69367526-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282156 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282193 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbkfp\" (UniqueName: \"kubernetes.io/projected/fd86db08-17aa-4451-a5c3-d27e252885b3-kube-api-access-wbkfp\") pod \"cluster-samples-operator-665b6dd947-lw9fr\" (UID: \"fd86db08-17aa-4451-a5c3-d27e252885b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282247 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-service-ca\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282278 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-service-ca-bundle\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282327 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282366 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0aa4d8b0-73fa-4173-903c-c397c58652ef-audit-dir\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282396 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f55fbb51-88ec-4429-beef-ed7d15f8b243-serving-cert\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282421 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-ca\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282453 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxc7l\" (UniqueName: \"kubernetes.io/projected/aa939e70-7c23-478e-9fca-ac0632a1295a-kube-api-access-xxc7l\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282485 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfc8981c-2d2c-41ef-b512-ad47319ca813-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282514 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-console-config\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282538 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkn2h\" (UniqueName: \"kubernetes.io/projected/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-kube-api-access-qkn2h\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282568 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-service-ca\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282598 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282627 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282660 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-etcd-client\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282687 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkvj5\" (UniqueName: \"kubernetes.io/projected/f55fbb51-88ec-4429-beef-ed7d15f8b243-kube-api-access-xkvj5\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282719 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-audit-policies\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282752 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20be5092-3da3-4d07-b481-239261c350da-serving-cert\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282783 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6kk\" (UniqueName: \"kubernetes.io/projected/8306877f-f9f0-4b02-943f-ab42d3f5f66a-kube-api-access-4h6kk\") pod \"downloads-7954f5f757-v7lbj\" (UID: \"8306877f-f9f0-4b02-943f-ab42d3f5f66a\") " pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282814 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282924 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282958 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m5g5\" (UniqueName: \"kubernetes.io/projected/834ba560-2a16-437b-8d57-20a0017ee78f-kube-api-access-9m5g5\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.282987 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283012 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7cvv\" (UniqueName: \"kubernetes.io/projected/9e2beb30-525e-45b7-858f-22d5f5f591d6-kube-api-access-r7cvv\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283042 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8bx6\" (UniqueName: \"kubernetes.io/projected/20be5092-3da3-4d07-b481-239261c350da-kube-api-access-b8bx6\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283074 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-dir\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283214 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wljsc\" (UniqueName: \"kubernetes.io/projected/a09a10cf-7430-4d17-bac7-f68e69367526-kube-api-access-wljsc\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283256 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-serving-cert\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283285 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czdtb\" (UniqueName: \"kubernetes.io/projected/90b8df67-ba46-4eea-8487-74e89cbf9a55-kube-api-access-czdtb\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283349 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgxhm\" (UniqueName: \"kubernetes.io/projected/70b27d89-4c43-455f-9084-434b226754a0-kube-api-access-qgxhm\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283424 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c93c14cf-0b1f-419e-9282-d58e950c1e42-serving-cert\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283457 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-config\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283514 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c93c14cf-0b1f-419e-9282-d58e950c1e42-config\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283552 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-encryption-config\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283579 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fd86db08-17aa-4451-a5c3-d27e252885b3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lw9fr\" (UID: \"fd86db08-17aa-4451-a5c3-d27e252885b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283617 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-etcd-client\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283647 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-trusted-ca-bundle\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283684 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a09a10cf-7430-4d17-bac7-f68e69367526-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283718 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-auth-proxy-config\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283746 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283779 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.283991 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe55c0cf-cc81-4cd9-94d8-5637539acba4-serving-cert\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284061 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-client-ca\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284109 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284191 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284326 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284366 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284394 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-client\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284445 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284485 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtcxr\" (UniqueName: \"kubernetes.io/projected/0aa4d8b0-73fa-4173-903c-c397c58652ef-kube-api-access-jtcxr\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284508 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsmdl\" (UniqueName: \"kubernetes.io/projected/fe55c0cf-cc81-4cd9-94d8-5637539acba4-kube-api-access-dsmdl\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284555 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-machine-approver-tls\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284676 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-image-import-ca\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284731 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-config\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284764 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f55fbb51-88ec-4429-beef-ed7d15f8b243-available-featuregates\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284797 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-config\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284838 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e4119979-962f-4ed5-9a63-b8498333259b-metrics-tls\") pod \"dns-operator-744455d44c-gs7nh\" (UID: \"e4119979-962f-4ed5-9a63-b8498333259b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284911 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/834ba560-2a16-437b-8d57-20a0017ee78f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.284979 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-serving-cert\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285012 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/70b27d89-4c43-455f-9084-434b226754a0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285060 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-policies\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285108 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-trusted-ca-bundle\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-oauth-serving-cert\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285174 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-audit\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285203 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfc8981c-2d2c-41ef-b512-ad47319ca813-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285238 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e2beb30-525e-45b7-858f-22d5f5f591d6-audit-dir\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285360 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86gn5\" (UniqueName: \"kubernetes.io/projected/dfc8981c-2d2c-41ef-b512-ad47319ca813-kube-api-access-86gn5\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285391 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-client-ca\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285424 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a09a10cf-7430-4d17-bac7-f68e69367526-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285472 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90b8df67-ba46-4eea-8487-74e89cbf9a55-serving-cert\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285500 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-config\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwfx8\" (UniqueName: \"kubernetes.io/projected/e4119979-962f-4ed5-9a63-b8498333259b-kube-api-access-cwfx8\") pod \"dns-operator-744455d44c-gs7nh\" (UID: \"e4119979-962f-4ed5-9a63-b8498333259b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.285567 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-config\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.296514 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.316073 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.345435 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.356989 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.376704 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386244 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe55c0cf-cc81-4cd9-94d8-5637539acba4-serving-cert\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386317 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-client-ca\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386338 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386356 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386374 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386391 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386407 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-client\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386424 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsmdl\" (UniqueName: \"kubernetes.io/projected/fe55c0cf-cc81-4cd9-94d8-5637539acba4-kube-api-access-dsmdl\") pod 
\"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386443 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-machine-approver-tls\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386460 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtcxr\" (UniqueName: \"kubernetes.io/projected/0aa4d8b0-73fa-4173-903c-c397c58652ef-kube-api-access-jtcxr\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386475 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-image-import-ca\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386489 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-config\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386504 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f55fbb51-88ec-4429-beef-ed7d15f8b243-available-featuregates\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386520 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-config\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386536 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e4119979-962f-4ed5-9a63-b8498333259b-metrics-tls\") pod \"dns-operator-744455d44c-gs7nh\" (UID: \"e4119979-962f-4ed5-9a63-b8498333259b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386553 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/834ba560-2a16-437b-8d57-20a0017ee78f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc 
kubenswrapper[4721]: I0130 21:19:11.386568 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-policies\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386584 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-serving-cert\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386600 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70b27d89-4c43-455f-9084-434b226754a0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386615 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-trusted-ca-bundle\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386631 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-oauth-serving-cert\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386645 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-audit\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfc8981c-2d2c-41ef-b512-ad47319ca813-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386677 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e2beb30-525e-45b7-858f-22d5f5f591d6-audit-dir\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386694 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86gn5\" (UniqueName: \"kubernetes.io/projected/dfc8981c-2d2c-41ef-b512-ad47319ca813-kube-api-access-86gn5\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: 
\"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386709 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-client-ca\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386734 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a09a10cf-7430-4d17-bac7-f68e69367526-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386751 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90b8df67-ba46-4eea-8487-74e89cbf9a55-serving-cert\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386767 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-config\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwfx8\" (UniqueName: \"kubernetes.io/projected/e4119979-962f-4ed5-9a63-b8498333259b-kube-api-access-cwfx8\") pod \"dns-operator-744455d44c-gs7nh\" (UID: \"e4119979-962f-4ed5-9a63-b8498333259b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386800 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-config\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386818 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-encryption-config\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386834 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d5b2021-f707-45bf-9890-d0ec722cd52f-serving-cert\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386850 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/834ba560-2a16-437b-8d57-20a0017ee78f-config\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386877 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70b27d89-4c43-455f-9084-434b226754a0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386902 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-etcd-serving-ca\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386924 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-serving-cert\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386940 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0aa4d8b0-73fa-4173-903c-c397c58652ef-node-pullsecrets\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386953 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-config\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386970 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9p95\" (UniqueName: \"kubernetes.io/projected/9d5b2021-f707-45bf-9890-d0ec722cd52f-kube-api-access-v9p95\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.386988 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfnbf\" (UniqueName: \"kubernetes.io/projected/c93c14cf-0b1f-419e-9282-d58e950c1e42-kube-api-access-hfnbf\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387005 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c93c14cf-0b1f-419e-9282-d58e950c1e42-trusted-ca\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " 
pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387020 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/834ba560-2a16-437b-8d57-20a0017ee78f-images\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387035 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387052 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j42n\" (UniqueName: \"kubernetes.io/projected/715d346b-ba37-4920-a27b-5f9ef61133ef-kube-api-access-2j42n\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a09a10cf-7430-4d17-bac7-f68e69367526-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387087 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-oauth-config\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387105 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387121 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbkfp\" (UniqueName: \"kubernetes.io/projected/fd86db08-17aa-4451-a5c3-d27e252885b3-kube-api-access-wbkfp\") pod \"cluster-samples-operator-665b6dd947-lw9fr\" (UID: \"fd86db08-17aa-4451-a5c3-d27e252885b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387136 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-service-ca-bundle\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc 
kubenswrapper[4721]: I0130 21:19:11.387151 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387174 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-service-ca\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387191 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxc7l\" (UniqueName: \"kubernetes.io/projected/aa939e70-7c23-478e-9fca-ac0632a1295a-kube-api-access-xxc7l\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387206 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0aa4d8b0-73fa-4173-903c-c397c58652ef-audit-dir\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387220 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f55fbb51-88ec-4429-beef-ed7d15f8b243-serving-cert\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387238 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-ca\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387268 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387283 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfc8981c-2d2c-41ef-b512-ad47319ca813-serving-cert\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387312 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-console-config\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387328 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkn2h\" (UniqueName: \"kubernetes.io/projected/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-kube-api-access-qkn2h\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387345 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-service-ca\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387359 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkvj5\" (UniqueName: \"kubernetes.io/projected/f55fbb51-88ec-4429-beef-ed7d15f8b243-kube-api-access-xkvj5\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387375 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-audit-policies\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387392 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-etcd-client\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387406 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20be5092-3da3-4d07-b481-239261c350da-serving-cert\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387421 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6kk\" (UniqueName: \"kubernetes.io/projected/8306877f-f9f0-4b02-943f-ab42d3f5f66a-kube-api-access-4h6kk\") pod \"downloads-7954f5f757-v7lbj\" (UID: \"8306877f-f9f0-4b02-943f-ab42d3f5f66a\") " pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:11 crc kubenswrapper[4721]: 
I0130 21:19:11.387436 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387454 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387487 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-dir\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387520 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m5g5\" (UniqueName: \"kubernetes.io/projected/834ba560-2a16-437b-8d57-20a0017ee78f-kube-api-access-9m5g5\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387556 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387572 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7cvv\" (UniqueName: \"kubernetes.io/projected/9e2beb30-525e-45b7-858f-22d5f5f591d6-kube-api-access-r7cvv\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387589 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8bx6\" (UniqueName: \"kubernetes.io/projected/20be5092-3da3-4d07-b481-239261c350da-kube-api-access-b8bx6\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387604 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czdtb\" (UniqueName: \"kubernetes.io/projected/90b8df67-ba46-4eea-8487-74e89cbf9a55-kube-api-access-czdtb\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387619 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgxhm\" (UniqueName: 
\"kubernetes.io/projected/70b27d89-4c43-455f-9084-434b226754a0-kube-api-access-qgxhm\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387637 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wljsc\" (UniqueName: \"kubernetes.io/projected/a09a10cf-7430-4d17-bac7-f68e69367526-kube-api-access-wljsc\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-serving-cert\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387676 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c93c14cf-0b1f-419e-9282-d58e950c1e42-serving-cert\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387690 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-config\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387705 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-etcd-client\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c93c14cf-0b1f-419e-9282-d58e950c1e42-config\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387738 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-encryption-config\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387754 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fd86db08-17aa-4451-a5c3-d27e252885b3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lw9fr\" (UID: \"fd86db08-17aa-4451-a5c3-d27e252885b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 
30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387788 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-trusted-ca-bundle\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387804 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a09a10cf-7430-4d17-bac7-f68e69367526-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387823 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-auth-proxy-config\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.387840 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.390075 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-dir\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.390637 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0aa4d8b0-73fa-4173-903c-c397c58652ef-audit-dir\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.390960 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-config\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.391120 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-service-ca\") 
pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.391919 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-service-ca-bundle\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.391991 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e2beb30-525e-45b7-858f-22d5f5f591d6-audit-dir\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.391123 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.392502 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-service-ca\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.393034 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/834ba560-2a16-437b-8d57-20a0017ee78f-config\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.393139 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-client-ca\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.393443 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a09a10cf-7430-4d17-bac7-f68e69367526-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394151 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-client-ca\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394330 4721 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-config\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394447 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394490 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-console-config\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394769 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-ca\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394779 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.394910 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfc8981c-2d2c-41ef-b512-ad47319ca813-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395052 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-audit\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395256 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-etcd-serving-ca\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395277 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-trusted-ca-bundle\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 
21:19:11.395450 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395602 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0aa4d8b0-73fa-4173-903c-c397c58652ef-node-pullsecrets\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395800 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfc8981c-2d2c-41ef-b512-ad47319ca813-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395903 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c93c14cf-0b1f-419e-9282-d58e950c1e42-serving-cert\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.395833 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-config\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.396129 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f55fbb51-88ec-4429-beef-ed7d15f8b243-available-featuregates\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.396434 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-oauth-config\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.397375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a09a10cf-7430-4d17-bac7-f68e69367526-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.397392 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c93c14cf-0b1f-419e-9282-d58e950c1e42-config\") pod 
\"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.397475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.397935 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70b27d89-4c43-455f-9084-434b226754a0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.398139 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20be5092-3da3-4d07-b481-239261c350da-config\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.398416 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-oauth-serving-cert\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.398446 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-trusted-ca-bundle\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.398476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c93c14cf-0b1f-419e-9282-d58e950c1e42-trusted-ca\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.398947 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fd86db08-17aa-4451-a5c3-d27e252885b3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lw9fr\" (UID: \"fd86db08-17aa-4451-a5c3-d27e252885b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399031 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399174 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: 
\"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399382 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90b8df67-ba46-4eea-8487-74e89cbf9a55-serving-cert\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399506 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-etcd-client\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399586 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399737 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/834ba560-2a16-437b-8d57-20a0017ee78f-images\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399973 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe55c0cf-cc81-4cd9-94d8-5637539acba4-serving-cert\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.399998 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-encryption-config\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.400196 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70b27d89-4c43-455f-9084-434b226754a0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.400835 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-serving-cert\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.400893 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-serving-cert\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.400918 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f55fbb51-88ec-4429-beef-ed7d15f8b243-serving-cert\") pod \"openshift-config-operator-7777fb866f-w99lk\" (UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.401492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/834ba560-2a16-437b-8d57-20a0017ee78f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.401854 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d5b2021-f707-45bf-9890-d0ec722cd52f-serving-cert\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.401862 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90b8df67-ba46-4eea-8487-74e89cbf9a55-etcd-client\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.402204 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9e2beb30-525e-45b7-858f-22d5f5f591d6-encryption-config\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.402456 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e4119979-962f-4ed5-9a63-b8498333259b-metrics-tls\") pod \"dns-operator-744455d44c-gs7nh\" (UID: \"e4119979-962f-4ed5-9a63-b8498333259b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.403391 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20be5092-3da3-4d07-b481-239261c350da-serving-cert\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.403647 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-serving-cert\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.417070 4721 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.436505 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.460056 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0aa4d8b0-73fa-4173-903c-c397c58652ef-etcd-client\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.460120 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-config\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.465282 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.492311 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.492840 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-config\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.493246 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0aa4d8b0-73fa-4173-903c-c397c58652ef-image-import-ca\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.493561 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.494166 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cm92z"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.494441 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.494603 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.494641 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.495121 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.495702 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.496246 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.496576 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-policies\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.496650 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.496816 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.497632 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-machine-approver-tls\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.498292 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-auth-proxy-config\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc 
kubenswrapper[4721]: I0130 21:19:11.502921 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.506560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e2beb30-525e-45b7-858f-22d5f5f591d6-audit-policies\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.506559 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.507217 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.507699 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.507920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.508178 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.508562 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.509495 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.510066 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.511497 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.512232 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.514104 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.514858 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.518970 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vhzw8"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.521173 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.524724 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-gbgkl"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.526096 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.527086 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-p6lgp"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.527828 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.532675 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.550896 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.551021 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.558871 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.560521 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.561090 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.562524 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.566384 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.566627 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.567965 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rs9bk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.569571 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.570288 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.570567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j42n\" (UniqueName: \"kubernetes.io/projected/715d346b-ba37-4920-a27b-5f9ef61133ef-kube-api-access-2j42n\") pod \"console-f9d7485db-xvtnr\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.570698 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.571453 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.571778 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.574621 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-lqxsf"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.575817 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.577976 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.586345 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-rknn2"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.586569 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.586882 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-r4652"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.587032 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.587887 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.588414 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-smcr8"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.588538 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.588571 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.588953 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a09a10cf-7430-4d17-bac7-f68e69367526-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.589111 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q9qsz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.589364 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.589455 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.589518 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.589987 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.591355 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592306 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-service-ca-bundle\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592434 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjb5t\" (UniqueName: \"kubernetes.io/projected/0b7a01e4-06c5-425c-8e68-d1b774fe74d2-kube-api-access-bjb5t\") pod \"package-server-manager-789f6589d5-vxmsm\" (UID: \"0b7a01e4-06c5-425c-8e68-d1b774fe74d2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592473 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-stats-auth\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592530 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592566 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592590 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0b7a01e4-06c5-425c-8e68-d1b774fe74d2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vxmsm\" (UID: \"0b7a01e4-06c5-425c-8e68-d1b774fe74d2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592615 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4zjv\" (UniqueName: \"kubernetes.io/projected/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-kube-api-access-g4zjv\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592647 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/95250d0b-0b56-458d-9476-0763808f1735-metrics-tls\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592672 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-config\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592733 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-metrics-certs\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592754 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95250d0b-0b56-458d-9476-0763808f1735-config-volume\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592776 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-default-certificate\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592805 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/image-registry-697d97f7c8-cm92z"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.592898 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8hfl\" (UniqueName: \"kubernetes.io/projected/95250d0b-0b56-458d-9476-0763808f1735-kube-api-access-r8hfl\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.593923 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.594651 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.597643 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gbgkl"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.599132 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vhzw8"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.600265 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.602075 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.603447 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.603631 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8bx6\" (UniqueName: \"kubernetes.io/projected/20be5092-3da3-4d07-b481-239261c350da-kube-api-access-b8bx6\") pod \"authentication-operator-69f744f599-th4nz\" (UID: \"20be5092-3da3-4d07-b481-239261c350da\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.604445 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.605622 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-r4652"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.605982 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.608739 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.608774 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q9qsz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.608789 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.609607 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.610668 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.611715 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rs9bk"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.612684 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.613331 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m5g5\" (UniqueName: \"kubernetes.io/projected/834ba560-2a16-437b-8d57-20a0017ee78f-kube-api-access-9m5g5\") pod \"machine-api-operator-5694c8668f-rhwvl\" (UID: \"834ba560-2a16-437b-8d57-20a0017ee78f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.613814 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-smcr8"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.615165 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-rknn2"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.633567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czdtb\" (UniqueName: \"kubernetes.io/projected/90b8df67-ba46-4eea-8487-74e89cbf9a55-kube-api-access-czdtb\") pod \"etcd-operator-b45778765-ktnwk\" (UID: \"90b8df67-ba46-4eea-8487-74e89cbf9a55\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.650828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgxhm\" (UniqueName: \"kubernetes.io/projected/70b27d89-4c43-455f-9084-434b226754a0-kube-api-access-qgxhm\") pod \"openshift-apiserver-operator-796bbdcf4f-2zn7w\" (UID: \"70b27d89-4c43-455f-9084-434b226754a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.676002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wljsc\" (UniqueName: \"kubernetes.io/projected/a09a10cf-7430-4d17-bac7-f68e69367526-kube-api-access-wljsc\") pod \"cluster-image-registry-operator-dc59b4c8b-8hxlg\" (UID: \"a09a10cf-7430-4d17-bac7-f68e69367526\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.692911 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxc7l\" (UniqueName: \"kubernetes.io/projected/aa939e70-7c23-478e-9fca-ac0632a1295a-kube-api-access-xxc7l\") pod \"oauth-openshift-558db77b4-xltgq\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693400 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0b7a01e4-06c5-425c-8e68-d1b774fe74d2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vxmsm\" (UID: \"0b7a01e4-06c5-425c-8e68-d1b774fe74d2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693427 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4zjv\" (UniqueName: \"kubernetes.io/projected/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-kube-api-access-g4zjv\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693446 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-config\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693462 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/95250d0b-0b56-458d-9476-0763808f1735-metrics-tls\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693525 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-metrics-certs\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693541 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95250d0b-0b56-458d-9476-0763808f1735-config-volume\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-default-certificate\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693589 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8hfl\" 
(UniqueName: \"kubernetes.io/projected/95250d0b-0b56-458d-9476-0763808f1735-kube-api-access-r8hfl\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693658 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-service-ca-bundle\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693685 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjb5t\" (UniqueName: \"kubernetes.io/projected/0b7a01e4-06c5-425c-8e68-d1b774fe74d2-kube-api-access-bjb5t\") pod \"package-server-manager-789f6589d5-vxmsm\" (UID: \"0b7a01e4-06c5-425c-8e68-d1b774fe74d2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693703 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-stats-auth\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693729 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.693746 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.714586 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7cvv\" (UniqueName: \"kubernetes.io/projected/9e2beb30-525e-45b7-858f-22d5f5f591d6-kube-api-access-r7cvv\") pod \"apiserver-7bbb656c7d-28slp\" (UID: \"9e2beb30-525e-45b7-858f-22d5f5f591d6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.732279 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbkfp\" (UniqueName: \"kubernetes.io/projected/fd86db08-17aa-4451-a5c3-d27e252885b3-kube-api-access-wbkfp\") pod \"cluster-samples-operator-665b6dd947-lw9fr\" (UID: \"fd86db08-17aa-4451-a5c3-d27e252885b3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.749644 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkvj5\" (UniqueName: \"kubernetes.io/projected/f55fbb51-88ec-4429-beef-ed7d15f8b243-kube-api-access-xkvj5\") pod \"openshift-config-operator-7777fb866f-w99lk\" 
(UID: \"f55fbb51-88ec-4429-beef-ed7d15f8b243\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.769894 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.770959 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkn2h\" (UniqueName: \"kubernetes.io/projected/1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326-kube-api-access-qkn2h\") pod \"machine-approver-56656f9798-w7pjl\" (UID: \"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.802162 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86gn5\" (UniqueName: \"kubernetes.io/projected/dfc8981c-2d2c-41ef-b512-ad47319ca813-kube-api-access-86gn5\") pod \"openshift-controller-manager-operator-756b6f6bc6-kh9xb\" (UID: \"dfc8981c-2d2c-41ef-b512-ad47319ca813\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.815715 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwfx8\" (UniqueName: \"kubernetes.io/projected/e4119979-962f-4ed5-9a63-b8498333259b-kube-api-access-cwfx8\") pod \"dns-operator-744455d44c-gs7nh\" (UID: \"e4119979-962f-4ed5-9a63-b8498333259b\") " pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.823508 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.833432 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9p95\" (UniqueName: \"kubernetes.io/projected/9d5b2021-f707-45bf-9890-d0ec722cd52f-kube-api-access-v9p95\") pod \"controller-manager-879f6c89f-2b8tc\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.843228 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.850948 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.858248 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsmdl\" (UniqueName: \"kubernetes.io/projected/fe55c0cf-cc81-4cd9-94d8-5637539acba4-kube-api-access-dsmdl\") pod \"route-controller-manager-6576b87f9c-pt27j\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.867520 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.874278 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtcxr\" (UniqueName: \"kubernetes.io/projected/0aa4d8b0-73fa-4173-903c-c397c58652ef-kube-api-access-jtcxr\") pod \"apiserver-76f77b778f-nddl4\" (UID: \"0aa4d8b0-73fa-4173-903c-c397c58652ef\") " pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.881424 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.890611 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfnbf\" (UniqueName: \"kubernetes.io/projected/c93c14cf-0b1f-419e-9282-d58e950c1e42-kube-api-access-hfnbf\") pod \"console-operator-58897d9998-czsct\" (UID: \"c93c14cf-0b1f-419e-9282-d58e950c1e42\") " pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.890924 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.899764 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.912929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6kk\" (UniqueName: \"kubernetes.io/projected/8306877f-f9f0-4b02-943f-ab42d3f5f66a-kube-api-access-4h6kk\") pod \"downloads-7954f5f757-v7lbj\" (UID: \"8306877f-f9f0-4b02-943f-ab42d3f5f66a\") " pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.913092 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.917258 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.923124 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.928570 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.935766 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.938580 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.942845 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"] Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.960084 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.977168 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: W0130 21:19:11.985585 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e55c467_ce3b_40a2_bfd7_eb5b8a7dd326.slice/crio-8d4871d774899f10a380ca59958bdf0467a9423ca791937a7fb6cd0ff96ec4ca WatchSource:0}: Error finding container 8d4871d774899f10a380ca59958bdf0467a9423ca791937a7fb6cd0ff96ec4ca: Status 404 returned error can't find the container with id 8d4871d774899f10a380ca59958bdf0467a9423ca791937a7fb6cd0ff96ec4ca Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.998956 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 30 21:19:11 crc kubenswrapper[4721]: I0130 21:19:11.999852 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-th4nz"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.001215 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xvtnr"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.016489 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.053884 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.054537 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-w99lk"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.064721 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0b7a01e4-06c5-425c-8e68-d1b774fe74d2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vxmsm\" (UID: \"0b7a01e4-06c5-425c-8e68-d1b774fe74d2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.065220 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.082218 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 30 21:19:12 crc kubenswrapper[4721]: W0130 21:19:12.082879 4721 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf55fbb51_88ec_4429_beef_ed7d15f8b243.slice/crio-8b10cbd9704f2cbecfb8c1c6adc7dcf0280d3d0d60f3e005d5971e938ac3841a WatchSource:0}: Error finding container 8b10cbd9704f2cbecfb8c1c6adc7dcf0280d3d0d60f3e005d5971e938ac3841a: Status 404 returned error can't find the container with id 8b10cbd9704f2cbecfb8c1c6adc7dcf0280d3d0d60f3e005d5971e938ac3841a Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.096480 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.106738 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.114179 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.129039 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.137375 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.137511 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.157664 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.160559 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.177894 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.182536 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ktnwk"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.197401 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.216874 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.228045 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-config\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.238478 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.238551 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-api/machine-api-operator-5694c8668f-rhwvl"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.240048 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.250780 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.256672 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.278954 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.305050 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.319076 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.323663 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb"] Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.339811 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.359454 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.397742 4721 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.416240 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.425753 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/95250d0b-0b56-458d-9476-0763808f1735-config-volume\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.439057 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.456774 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.470100 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/95250d0b-0b56-458d-9476-0763808f1735-metrics-tls\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:12 crc kubenswrapper[4721]: 
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.496385 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.513721 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-metrics-certs\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.518658 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.543439 4721 request.go:700] Waited for 1.015198854s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-certs-default&limit=500&resourceVersion=0
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.548191 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.601119 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.602458 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gs7nh"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.603392 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.604851 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-service-ca-bundle\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.605344 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.613977 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-stats-auth\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.637465 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.641504 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.652555 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-default-certificate\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.669188 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xltgq"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.669259 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.675060 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-czsct"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.676039 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.682255 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.695338 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-nddl4"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.698026 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.718371 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.728931 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.738076 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.741253 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-v7lbj"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.747274 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2b8tc"]
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.756703 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 30 21:19:12 crc kubenswrapper[4721]: W0130 21:19:12.771982 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8306877f_f9f0_4b02_943f_ab42d3f5f66a.slice/crio-d434a0268aaaca54a1cdd317a4c911c60feb83bf4eeae393836af42e6b14eda6 WatchSource:0}: Error finding container d434a0268aaaca54a1cdd317a4c911c60feb83bf4eeae393836af42e6b14eda6: Status 404 returned error can't find the container with id d434a0268aaaca54a1cdd317a4c911c60feb83bf4eeae393836af42e6b14eda6
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.776612 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.803184 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.816840 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.837036 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 30 21:19:12 crc kubenswrapper[4721]: W0130 21:19:12.844130 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d5b2021_f707_45bf_9890_d0ec722cd52f.slice/crio-b45760c8ed6be0a742ab04b0c7e58873dc6fdf2f6fd8f951e1e554a4a463b6a9 WatchSource:0}: Error finding container b45760c8ed6be0a742ab04b0c7e58873dc6fdf2f6fd8f951e1e554a4a463b6a9: Status 404 returned error can't find the container with id b45760c8ed6be0a742ab04b0c7e58873dc6fdf2f6fd8f951e1e554a4a463b6a9
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.857278 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.877737 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.891313 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" event={"ID":"90b8df67-ba46-4eea-8487-74e89cbf9a55","Type":"ContainerStarted","Data":"68c20e278e9f3347da32f62c14af76529f66a33b8a6b7f8915b25e4483016073"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.891359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" event={"ID":"90b8df67-ba46-4eea-8487-74e89cbf9a55","Type":"ContainerStarted","Data":"00eccc586b92c344022900a077a00801cc61d4b43a4adb9ad0ca61e6d05ed7bd"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.896267 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.904342 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" event={"ID":"834ba560-2a16-437b-8d57-20a0017ee78f","Type":"ContainerStarted","Data":"fbb1e445e1903d51d9bc328033e21657e9b6568dc4e503208f990446034ac761"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.904401 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" event={"ID":"834ba560-2a16-437b-8d57-20a0017ee78f","Type":"ContainerStarted","Data":"42abb6e20396a1d3d86c0645f1559a20297407abf40dde818cadd989889d238d"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.907219 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" event={"ID":"dfc8981c-2d2c-41ef-b512-ad47319ca813","Type":"ContainerStarted","Data":"c08cca1e3e330150c009e521bd6d2b9baaeec011b892e6948da9304a2ca59fd3"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.907283 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" event={"ID":"dfc8981c-2d2c-41ef-b512-ad47319ca813","Type":"ContainerStarted","Data":"6100fcedfc75ed5e8b1b2fe664c0b2a9d4cb82ec52bfe2181a3ed1c6e545cbea"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.909453 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" event={"ID":"0aa4d8b0-73fa-4173-903c-c397c58652ef","Type":"ContainerStarted","Data":"8f9bd2f09daaf0661365a16fce8bd2f2842ab54f4a9f2f0afcc0b57b4dd3980e"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.910762 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" event={"ID":"70b27d89-4c43-455f-9084-434b226754a0","Type":"ContainerStarted","Data":"f0c32fe1ff37e514d164ef332bbdad12b7f0f9940c5ac4f5598434fc9ef9a379"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.910803 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" event={"ID":"70b27d89-4c43-455f-9084-434b226754a0","Type":"ContainerStarted","Data":"bd504e941c19c23cfccf95495668d2abb50631f91b02966d119c661c44188763"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.913068 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" event={"ID":"20be5092-3da3-4d07-b481-239261c350da","Type":"ContainerStarted","Data":"4b72902eb0a69e592ca75bddcf23f6cd02329662c5d5edf2842e06cb532416d3"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.913097 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" event={"ID":"20be5092-3da3-4d07-b481-239261c350da","Type":"ContainerStarted","Data":"81c8ee32427fe2c96d23dde20c2426fd2c0ffd25b7c21d36f24107a0b5a21d53"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.914150 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" event={"ID":"a09a10cf-7430-4d17-bac7-f68e69367526","Type":"ContainerStarted","Data":"c416dd11c09ec88142dabdaaa74a27c6acf7c31dbd468dad09aec0be327183b5"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.915624 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-v7lbj" event={"ID":"8306877f-f9f0-4b02-943f-ab42d3f5f66a","Type":"ContainerStarted","Data":"d434a0268aaaca54a1cdd317a4c911c60feb83bf4eeae393836af42e6b14eda6"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.916702 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" event={"ID":"9d5b2021-f707-45bf-9890-d0ec722cd52f","Type":"ContainerStarted","Data":"b45760c8ed6be0a742ab04b0c7e58873dc6fdf2f6fd8f951e1e554a4a463b6a9"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.916868 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.918221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" event={"ID":"e4119979-962f-4ed5-9a63-b8498333259b","Type":"ContainerStarted","Data":"d2dfe9e15f73f763086421edb5f04a88d55ee4b407091133081e383fbe97e151"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.925851 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" event={"ID":"aa939e70-7c23-478e-9fca-ac0632a1295a","Type":"ContainerStarted","Data":"ab36736f1b81bc7d068b808c9813f2700bd4503ce24610136455262e81b24715"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.927778 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xvtnr" event={"ID":"715d346b-ba37-4920-a27b-5f9ef61133ef","Type":"ContainerStarted","Data":"69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.927805 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xvtnr" event={"ID":"715d346b-ba37-4920-a27b-5f9ef61133ef","Type":"ContainerStarted","Data":"252a233dcc9ac00ba17477057837a8939321e0a52fe841c029ccdf13b1a69a22"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.933113 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-czsct" event={"ID":"c93c14cf-0b1f-419e-9282-d58e950c1e42","Type":"ContainerStarted","Data":"f3f9aac5bb8ec746ff1435cbdb3c1506958140101793f9bfc5b295fb1282539b"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.934837 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" event={"ID":"fd86db08-17aa-4451-a5c3-d27e252885b3","Type":"ContainerStarted","Data":"95e656b6b134672a7be29bdcb0484239b47661c463bff4d4a4fea6607f089ac6"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.936441 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.936507 4721 generic.go:334] "Generic (PLEG): container finished" podID="9e2beb30-525e-45b7-858f-22d5f5f591d6" containerID="0ecb50ca95687c6181d09245a1700220df21660e8ca5e67e4d8c910329462171" exitCode=0
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.936614 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" event={"ID":"9e2beb30-525e-45b7-858f-22d5f5f591d6","Type":"ContainerDied","Data":"0ecb50ca95687c6181d09245a1700220df21660e8ca5e67e4d8c910329462171"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.936655 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" event={"ID":"9e2beb30-525e-45b7-858f-22d5f5f591d6","Type":"ContainerStarted","Data":"f37e14e87ea11141ac642cc8a589c3eb8714812d1c7de72957ee504eb1f6a7ba"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.942600 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" event={"ID":"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326","Type":"ContainerStarted","Data":"89e717117aa848cddb14fc78da589c9d25ae99ded871d1e06294d63bbf7f30b5"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.942653 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" event={"ID":"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326","Type":"ContainerStarted","Data":"8d4871d774899f10a380ca59958bdf0467a9423ca791937a7fb6cd0ff96ec4ca"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.952671 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" event={"ID":"fe55c0cf-cc81-4cd9-94d8-5637539acba4","Type":"ContainerStarted","Data":"593940af64e56c2e48b19bb96d748346f047e4cf5973771d2d42ef6d4cd82f60"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.957172 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.958712 4721 generic.go:334] "Generic (PLEG): container finished" podID="f55fbb51-88ec-4429-beef-ed7d15f8b243" containerID="67da240208307866277272cb1518d0120a31525830cd01b089d96649a09ccb2c" exitCode=0
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.958767 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" event={"ID":"f55fbb51-88ec-4429-beef-ed7d15f8b243","Type":"ContainerDied","Data":"67da240208307866277272cb1518d0120a31525830cd01b089d96649a09ccb2c"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.958797 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" event={"ID":"f55fbb51-88ec-4429-beef-ed7d15f8b243","Type":"ContainerStarted","Data":"8b10cbd9704f2cbecfb8c1c6adc7dcf0280d3d0d60f3e005d5971e938ac3841a"}
Jan 30 21:19:12 crc kubenswrapper[4721]: I0130 21:19:12.977662 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:12.997379 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.017112 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.037591 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.059921 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.077553 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.102173 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.119679 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.138330 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.156704 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.179245 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.197735 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.217891 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.237498 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.258728 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.277365 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.298091 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.317424 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.336404 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.357014 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.381536 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.398077 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.416588 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.436214 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.457672 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.475945 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.497594 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.517751 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.543328 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.554606 4721 request.go:700] Waited for 1.964780528s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.556553 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.576336 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.615659 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4j4cn\" (UID: \"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.633937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4zjv\" (UniqueName: \"kubernetes.io/projected/5c12bf80-90a7-457e-ad04-e0b55d28e2bf-kube-api-access-g4zjv\") pod \"router-default-5444994796-p6lgp\" (UID: \"5c12bf80-90a7-457e-ad04-e0b55d28e2bf\") " pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.652860 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8hfl\" (UniqueName: \"kubernetes.io/projected/95250d0b-0b56-458d-9476-0763808f1735-kube-api-access-r8hfl\") pod \"dns-default-gbgkl\" (UID: \"95250d0b-0b56-458d-9476-0763808f1735\") " pod="openshift-dns/dns-default-gbgkl"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.672996 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjb5t\" (UniqueName: \"kubernetes.io/projected/0b7a01e4-06c5-425c-8e68-d1b774fe74d2-kube-api-access-bjb5t\") pod \"package-server-manager-789f6589d5-vxmsm\" (UID: \"0b7a01e4-06c5-425c-8e68-d1b774fe74d2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm"
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.722675 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn"
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.730964 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nztp4\" (UniqueName: \"kubernetes.io/projected/6a1a4e32-9874-4126-94e1-b741a6bd20c1-kube-api-access-nztp4\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731019 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jshkx\" (UniqueName: \"kubernetes.io/projected/93862821-84b5-4736-b516-73b6b69064ff-kube-api-access-jshkx\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731045 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-installation-pull-secrets\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731071 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5bwr\" (UniqueName: \"kubernetes.io/projected/67324f1b-9004-416f-82a6-5758c069a111-kube-api-access-d5bwr\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731098 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-plugins-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731122 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/67324f1b-9004-416f-82a6-5758c069a111-profile-collector-cert\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-certificates\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731168 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c8c5d710-b5ec-4998-899c-14cac760d429-proxy-tls\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: 
\"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731524 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-registration-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731583 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-csi-data-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731621 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-bound-sa-token\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731649 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6a1a4e32-9874-4126-94e1-b741a6bd20c1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-trusted-ca\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731790 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6a1a4e32-9874-4126-94e1-b741a6bd20c1-metrics-tls\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731832 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/714b4cdb-f9a5-4d74-b46f-21e6167f1807-apiservice-cert\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.731891 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-ca-trust-extracted\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc 
kubenswrapper[4721]: I0130 21:19:13.731922 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a1a4e32-9874-4126-94e1-b741a6bd20c1-trusted-ca\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732024 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-socket-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732103 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm7cz\" (UniqueName: \"kubernetes.io/projected/c8c5d710-b5ec-4998-899c-14cac760d429-kube-api-access-jm7cz\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732155 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/714b4cdb-f9a5-4d74-b46f-21e6167f1807-tmpfs\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732199 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732230 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/714b4cdb-f9a5-4d74-b46f-21e6167f1807-webhook-cert\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732253 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-mountpoint-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732276 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdbnn\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-kube-api-access-vdbnn\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732322 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n246h\" (UniqueName: \"kubernetes.io/projected/714b4cdb-f9a5-4d74-b46f-21e6167f1807-kube-api-access-n246h\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732345 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/67324f1b-9004-416f-82a6-5758c069a111-srv-cert\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732367 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-tls\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.732392 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c8c5d710-b5ec-4998-899c-14cac760d429-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: E0130 21:19:13.732968 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.23294552 +0000 UTC m=+143.024846766 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.788987 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.808161 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835279 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835532 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835560 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-signing-key\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835643 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-bound-sa-token\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6a1a4e32-9874-4126-94e1-b741a6bd20c1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835684 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-registration-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835715 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-csi-data-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835779 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-trusted-ca\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835800 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljqxv\" (UniqueName: 
\"kubernetes.io/projected/10335cae-c54e-4bf1-b41c-6df530ac47dc-kube-api-access-ljqxv\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835846 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6a1a4e32-9874-4126-94e1-b741a6bd20c1-metrics-tls\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835870 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/714b4cdb-f9a5-4d74-b46f-21e6167f1807-apiservice-cert\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835891 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8rcx\" (UniqueName: \"kubernetes.io/projected/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-kube-api-access-q8rcx\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835924 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9557d61f-1030-4412-87ec-372c7b26f9b2-profile-collector-cert\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835944 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-ca-trust-extracted\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835962 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a1a4e32-9874-4126-94e1-b741a6bd20c1-trusted-ca\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.835982 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836011 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw2sp\" (UniqueName: \"kubernetes.io/projected/364de1a3-22d0-4012-ac93-def721ef2851-kube-api-access-vw2sp\") pod 
\"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836031 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-signing-cabundle\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836051 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39508b8-299a-40c7-872b-9cffd0d4ad11-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836069 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6167606-771c-4d04-840f-17a8998a71a4-serving-cert\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836113 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-socket-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836136 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.836190 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm7cz\" (UniqueName: \"kubernetes.io/projected/c8c5d710-b5ec-4998-899c-14cac760d429-kube-api-access-jm7cz\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.837138 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-ca-trust-extracted\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: W0130 21:19:13.836117 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c12bf80_90a7_457e_ad04_e0b55d28e2bf.slice/crio-093050870b2c40587ae2fae69ab607e8caeada6e9e16dca4847bbe6aa7b34229 WatchSource:0}: Error finding container 
093050870b2c40587ae2fae69ab607e8caeada6e9e16dca4847bbe6aa7b34229: Status 404 returned error can't find the container with id 093050870b2c40587ae2fae69ab607e8caeada6e9e16dca4847bbe6aa7b34229 Jan 30 21:19:13 crc kubenswrapper[4721]: E0130 21:19:13.836327 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.336275925 +0000 UTC m=+143.128177181 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.837564 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/714b4cdb-f9a5-4d74-b46f-21e6167f1807-tmpfs\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.838400 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-registration-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.838416 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-socket-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.838492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a1a4e32-9874-4126-94e1-b741a6bd20c1-trusted-ca\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.838635 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-csi-data-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.839488 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.839642 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a39508b8-299a-40c7-872b-9cffd0d4ad11-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.842434 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-trusted-ca\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: E0130 21:19:13.842808 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.342789469 +0000 UTC m=+143.134690715 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.842941 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45d6565a-19c0-4077-a8b2-727014145e16-cert\") pod \"ingress-canary-rknn2\" (UID: \"45d6565a-19c0-4077-a8b2-727014145e16\") " pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.843281 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4764t\" (UniqueName: \"kubernetes.io/projected/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-kube-api-access-4764t\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.843339 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/714b4cdb-f9a5-4d74-b46f-21e6167f1807-tmpfs\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.843775 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/714b4cdb-f9a5-4d74-b46f-21e6167f1807-webhook-cert\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.844534 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6a1a4e32-9874-4126-94e1-b741a6bd20c1-metrics-tls\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.844917 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/364de1a3-22d0-4012-ac93-def721ef2851-proxy-tls\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.848330 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/714b4cdb-f9a5-4d74-b46f-21e6167f1807-webhook-cert\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.850237 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/714b4cdb-f9a5-4d74-b46f-21e6167f1807-apiservice-cert\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.855520 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-mountpoint-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.856794 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/36b59aa6-8bde-4935-82ce-04ef6d8ec10c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m2qpz\" (UID: \"36b59aa6-8bde-4935-82ce-04ef6d8ec10c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.857660 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdbnn\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-kube-api-access-vdbnn\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.858999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a39508b8-299a-40c7-872b-9cffd0d4ad11-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859076 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57505fbf-a7f7-45ff-91bb-f3463567721e-config-volume\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859137 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c98j2\" (UniqueName: \"kubernetes.io/projected/45d6565a-19c0-4077-a8b2-727014145e16-kube-api-access-c98j2\") pod \"ingress-canary-rknn2\" (UID: \"45d6565a-19c0-4077-a8b2-727014145e16\") " pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859230 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n246h\" (UniqueName: \"kubernetes.io/projected/714b4cdb-f9a5-4d74-b46f-21e6167f1807-kube-api-access-n246h\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859290 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvqmx\" (UniqueName: \"kubernetes.io/projected/4886a538-d346-47dc-a6cb-184ae7d015b9-kube-api-access-hvqmx\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.858375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-mountpoint-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859643 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/67324f1b-9004-416f-82a6-5758c069a111-srv-cert\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859746 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-tls\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.859859 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/364de1a3-22d0-4012-ac93-def721ef2851-images\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860051 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5txhq\" (UniqueName: \"kubernetes.io/projected/d298abde-996b-4c31-a26c-474a2b9e8742-kube-api-access-5txhq\") pod \"multus-admission-controller-857f4d67dd-rs9bk\" (UID: \"d298abde-996b-4c31-a26c-474a2b9e8742\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:13 crc 
kubenswrapper[4721]: I0130 21:19:13.860156 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8mbf\" (UniqueName: \"kubernetes.io/projected/f6167606-771c-4d04-840f-17a8998a71a4-kube-api-access-p8mbf\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860200 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tf54\" (UniqueName: \"kubernetes.io/projected/9557d61f-1030-4412-87ec-372c7b26f9b2-kube-api-access-9tf54\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860401 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-config\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860533 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c8c5d710-b5ec-4998-899c-14cac760d429-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860658 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctlcc\" (UniqueName: \"kubernetes.io/projected/36b59aa6-8bde-4935-82ce-04ef6d8ec10c-kube-api-access-ctlcc\") pod \"control-plane-machine-set-operator-78cbb6b69f-m2qpz\" (UID: \"36b59aa6-8bde-4935-82ce-04ef6d8ec10c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860727 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57505fbf-a7f7-45ff-91bb-f3463567721e-secret-volume\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.860854 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-node-bootstrap-token\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.862129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nztp4\" (UniqueName: \"kubernetes.io/projected/6a1a4e32-9874-4126-94e1-b741a6bd20c1-kube-api-access-nztp4\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.862320 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc5qk\" (UniqueName: \"kubernetes.io/projected/57505fbf-a7f7-45ff-91bb-f3463567721e-kube-api-access-bc5qk\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.862539 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jshkx\" (UniqueName: \"kubernetes.io/projected/93862821-84b5-4736-b516-73b6b69064ff-kube-api-access-jshkx\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.862611 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-certs\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.862640 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c8c5d710-b5ec-4998-899c-14cac760d429-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.862866 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/364de1a3-22d0-4012-ac93-def721ef2851-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.863075 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-installation-pull-secrets\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.863127 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5bwr\" (UniqueName: \"kubernetes.io/projected/67324f1b-9004-416f-82a6-5758c069a111-kube-api-access-d5bwr\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.863160 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqzp8\" (UniqueName: \"kubernetes.io/projected/85b3072c-c7f7-41e4-a661-c8cdeb4cd811-kube-api-access-qqzp8\") pod \"migrator-59844c95c7-cwrtc\" (UID: \"85b3072c-c7f7-41e4-a661-c8cdeb4cd811\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" 
Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.863188 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.863214 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6167606-771c-4d04-840f-17a8998a71a4-config\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4886a538-d346-47dc-a6cb-184ae7d015b9-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864130 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-plugins-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864166 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4886a538-d346-47dc-a6cb-184ae7d015b9-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864255 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9557d61f-1030-4412-87ec-372c7b26f9b2-srv-cert\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864321 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/93862821-84b5-4736-b516-73b6b69064ff-plugins-dir\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864424 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/67324f1b-9004-416f-82a6-5758c069a111-profile-collector-cert\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.864611 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-certificates\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.865356 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c8c5d710-b5ec-4998-899c-14cac760d429-proxy-tls\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.865452 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d298abde-996b-4c31-a26c-474a2b9e8742-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rs9bk\" (UID: \"d298abde-996b-4c31-a26c-474a2b9e8742\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.867829 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-certificates\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.870772 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/67324f1b-9004-416f-82a6-5758c069a111-profile-collector-cert\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.871114 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-installation-pull-secrets\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.872011 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/67324f1b-9004-416f-82a6-5758c069a111-srv-cert\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.876182 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c8c5d710-b5ec-4998-899c-14cac760d429-proxy-tls\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.879841 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-tls\") pod 
\"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.884375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm7cz\" (UniqueName: \"kubernetes.io/projected/c8c5d710-b5ec-4998-899c-14cac760d429-kube-api-access-jm7cz\") pod \"machine-config-controller-84d6567774-5w55t\" (UID: \"c8c5d710-b5ec-4998-899c-14cac760d429\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.894722 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-bound-sa-token\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.925266 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6a1a4e32-9874-4126-94e1-b741a6bd20c1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.959792 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.960475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdbnn\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-kube-api-access-vdbnn\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.969420 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" event={"ID":"fd86db08-17aa-4451-a5c3-d27e252885b3","Type":"ContainerStarted","Data":"78ac1f179fb271093c45094671d8220b84e63cfd1c4116927b07de2c61151a54"} Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.969486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" event={"ID":"fd86db08-17aa-4451-a5c3-d27e252885b3","Type":"ContainerStarted","Data":"94961c690475d59639616ffbc0b74d5b2dd863a62c568d2251da22136e098861"} Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.970826 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971070 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971095 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6167606-771c-4d04-840f-17a8998a71a4-config\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971120 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4886a538-d346-47dc-a6cb-184ae7d015b9-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4886a538-d346-47dc-a6cb-184ae7d015b9-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: E0130 21:19:13.971175 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.471145975 +0000 UTC m=+143.263047371 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971221 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9557d61f-1030-4412-87ec-372c7b26f9b2-srv-cert\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971283 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d298abde-996b-4c31-a26c-474a2b9e8742-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rs9bk\" (UID: \"d298abde-996b-4c31-a26c-474a2b9e8742\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971335 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971358 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-signing-key\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljqxv\" (UniqueName: \"kubernetes.io/projected/10335cae-c54e-4bf1-b41c-6df530ac47dc-kube-api-access-ljqxv\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971466 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8rcx\" (UniqueName: \"kubernetes.io/projected/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-kube-api-access-q8rcx\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971489 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9557d61f-1030-4412-87ec-372c7b26f9b2-profile-collector-cert\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971517 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971543 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw2sp\" (UniqueName: \"kubernetes.io/projected/364de1a3-22d0-4012-ac93-def721ef2851-kube-api-access-vw2sp\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-signing-cabundle\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971601 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39508b8-299a-40c7-872b-9cffd0d4ad11-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971623 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6167606-771c-4d04-840f-17a8998a71a4-serving-cert\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971655 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971743 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a39508b8-299a-40c7-872b-9cffd0d4ad11-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971775 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/45d6565a-19c0-4077-a8b2-727014145e16-cert\") pod \"ingress-canary-rknn2\" (UID: \"45d6565a-19c0-4077-a8b2-727014145e16\") " pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971800 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4764t\" (UniqueName: \"kubernetes.io/projected/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-kube-api-access-4764t\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971834 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/364de1a3-22d0-4012-ac93-def721ef2851-proxy-tls\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971868 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/36b59aa6-8bde-4935-82ce-04ef6d8ec10c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m2qpz\" (UID: \"36b59aa6-8bde-4935-82ce-04ef6d8ec10c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971925 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a39508b8-299a-40c7-872b-9cffd0d4ad11-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971957 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57505fbf-a7f7-45ff-91bb-f3463567721e-config-volume\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.971982 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c98j2\" (UniqueName: \"kubernetes.io/projected/45d6565a-19c0-4077-a8b2-727014145e16-kube-api-access-c98j2\") pod \"ingress-canary-rknn2\" (UID: \"45d6565a-19c0-4077-a8b2-727014145e16\") " pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972021 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvqmx\" (UniqueName: \"kubernetes.io/projected/4886a538-d346-47dc-a6cb-184ae7d015b9-kube-api-access-hvqmx\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972074 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/364de1a3-22d0-4012-ac93-def721ef2851-images\") pod 
\"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972101 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5txhq\" (UniqueName: \"kubernetes.io/projected/d298abde-996b-4c31-a26c-474a2b9e8742-kube-api-access-5txhq\") pod \"multus-admission-controller-857f4d67dd-rs9bk\" (UID: \"d298abde-996b-4c31-a26c-474a2b9e8742\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972127 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8mbf\" (UniqueName: \"kubernetes.io/projected/f6167606-771c-4d04-840f-17a8998a71a4-kube-api-access-p8mbf\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972154 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tf54\" (UniqueName: \"kubernetes.io/projected/9557d61f-1030-4412-87ec-372c7b26f9b2-kube-api-access-9tf54\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-config\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctlcc\" (UniqueName: \"kubernetes.io/projected/36b59aa6-8bde-4935-82ce-04ef6d8ec10c-kube-api-access-ctlcc\") pod \"control-plane-machine-set-operator-78cbb6b69f-m2qpz\" (UID: \"36b59aa6-8bde-4935-82ce-04ef6d8ec10c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972243 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57505fbf-a7f7-45ff-91bb-f3463567721e-secret-volume\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972273 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-node-bootstrap-token\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972329 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc5qk\" (UniqueName: \"kubernetes.io/projected/57505fbf-a7f7-45ff-91bb-f3463567721e-kube-api-access-bc5qk\") pod \"collect-profiles-29496795-h48jk\" (UID: 
\"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972367 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-certs\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972393 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/364de1a3-22d0-4012-ac93-def721ef2851-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972433 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqzp8\" (UniqueName: \"kubernetes.io/projected/85b3072c-c7f7-41e4-a661-c8cdeb4cd811-kube-api-access-qqzp8\") pod \"migrator-59844c95c7-cwrtc\" (UID: \"85b3072c-c7f7-41e4-a661-c8cdeb4cd811\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.972869 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn"] Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.974104 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6167606-771c-4d04-840f-17a8998a71a4-config\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: E0130 21:19:13.974784 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.474764614 +0000 UTC m=+143.266666050 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.974957 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/364de1a3-22d0-4012-ac93-def721ef2851-images\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.975669 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" event={"ID":"e4119979-962f-4ed5-9a63-b8498333259b","Type":"ContainerStarted","Data":"6ecf72ce2761df3406bf7570690c22869bc64c6cff289a1263124af3805da56e"} Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.975760 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" event={"ID":"e4119979-962f-4ed5-9a63-b8498333259b","Type":"ContainerStarted","Data":"78b3ef6b055d4d6e46519891ba1a5e0484e3e58d9dd19c4de0b1bd1129ebba44"} Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.976019 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57505fbf-a7f7-45ff-91bb-f3463567721e-config-volume\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.978091 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.978348 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4886a538-d346-47dc-a6cb-184ae7d015b9-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.978623 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4886a538-d346-47dc-a6cb-184ae7d015b9-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.979131 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-signing-cabundle\") pod 
\"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.976532 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-config\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.979698 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39508b8-299a-40c7-872b-9cffd0d4ad11-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.979936 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/364de1a3-22d0-4012-ac93-def721ef2851-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.982642 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" event={"ID":"fe55c0cf-cc81-4cd9-94d8-5637539acba4","Type":"ContainerStarted","Data":"3ca21a265ffe19857eb6a00b060c373fc393d1568a9554a1f89291d339ed8616"} Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.982701 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.985060 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.985925 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-node-bootstrap-token\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.986400 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6167606-771c-4d04-840f-17a8998a71a4-serving-cert\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.987084 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9557d61f-1030-4412-87ec-372c7b26f9b2-profile-collector-cert\") pod \"catalog-operator-68c6474976-22jmb\" (UID: 
\"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.987728 4721 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-pt27j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.989480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n246h\" (UniqueName: \"kubernetes.io/projected/714b4cdb-f9a5-4d74-b46f-21e6167f1807-kube-api-access-n246h\") pod \"packageserver-d55dfcdfc-d2jql\" (UID: \"714b4cdb-f9a5-4d74-b46f-21e6167f1807\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.989508 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/364de1a3-22d0-4012-ac93-def721ef2851-proxy-tls\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.989743 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.990148 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/36b59aa6-8bde-4935-82ce-04ef6d8ec10c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m2qpz\" (UID: \"36b59aa6-8bde-4935-82ce-04ef6d8ec10c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.990594 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.991091 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9557d61f-1030-4412-87ec-372c7b26f9b2-srv-cert\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:13 crc kubenswrapper[4721]: W0130 21:19:13.995807 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3a893c7_1f1c_48cb_b7bb_9f3a0ab23773.slice/crio-39d51fef8ef000a0acc7848a5cb5152e6aa27d822a7a49d11830b306426ea2f5 WatchSource:0}: Error finding container 39d51fef8ef000a0acc7848a5cb5152e6aa27d822a7a49d11830b306426ea2f5: Status 404 returned error can't find the container with id 
39d51fef8ef000a0acc7848a5cb5152e6aa27d822a7a49d11830b306426ea2f5 Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.996482 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-signing-key\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.996519 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a39508b8-299a-40c7-872b-9cffd0d4ad11-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:13 crc kubenswrapper[4721]: I0130 21:19:13.997219 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57505fbf-a7f7-45ff-91bb-f3463567721e-secret-volume\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.000597 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" event={"ID":"f55fbb51-88ec-4429-beef-ed7d15f8b243","Type":"ContainerStarted","Data":"33e2d41dc358f276c5899705959cf76f34b608a759d0a7a7d1505530da01e1c0"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.001463 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.001253 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-certs\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.002397 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.004219 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d298abde-996b-4c31-a26c-474a2b9e8742-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rs9bk\" (UID: \"d298abde-996b-4c31-a26c-474a2b9e8742\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.005707 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nztp4\" (UniqueName: \"kubernetes.io/projected/6a1a4e32-9874-4126-94e1-b741a6bd20c1-kube-api-access-nztp4\") pod \"ingress-operator-5b745b69d9-chc5l\" (UID: \"6a1a4e32-9874-4126-94e1-b741a6bd20c1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.008123 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45d6565a-19c0-4077-a8b2-727014145e16-cert\") pod \"ingress-canary-rknn2\" (UID: \"45d6565a-19c0-4077-a8b2-727014145e16\") " pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.008210 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" event={"ID":"a09a10cf-7430-4d17-bac7-f68e69367526","Type":"ContainerStarted","Data":"9b7c5f953688932d47247fb105d967c77590adf67d31047f505ee1d4f15b7616"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.012617 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.016544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-v7lbj" event={"ID":"8306877f-f9f0-4b02-943f-ab42d3f5f66a","Type":"ContainerStarted","Data":"74d232cee2e9aa7bd51a8c7c6e52a867036c40bf997cc36c75ab1234d7e054fc"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.018534 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.018604 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.020387 4721 generic.go:334] "Generic (PLEG): container finished" podID="0aa4d8b0-73fa-4173-903c-c397c58652ef" containerID="1b5caf0cbef8824ef2647c00de3499a3d1c7f8e0f187b25046719d875a2590eb" exitCode=0 Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.016823 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.025534 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" 
event={"ID":"0aa4d8b0-73fa-4173-903c-c397c58652ef","Type":"ContainerDied","Data":"1b5caf0cbef8824ef2647c00de3499a3d1c7f8e0f187b25046719d875a2590eb"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.025770 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p6lgp" event={"ID":"5c12bf80-90a7-457e-ad04-e0b55d28e2bf","Type":"ContainerStarted","Data":"093050870b2c40587ae2fae69ab607e8caeada6e9e16dca4847bbe6aa7b34229"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.028365 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jshkx\" (UniqueName: \"kubernetes.io/projected/93862821-84b5-4736-b516-73b6b69064ff-kube-api-access-jshkx\") pod \"csi-hostpathplugin-vhzw8\" (UID: \"93862821-84b5-4736-b516-73b6b69064ff\") " pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.041625 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" event={"ID":"9e2beb30-525e-45b7-858f-22d5f5f591d6","Type":"ContainerStarted","Data":"b365badb87f81e8e2a85a59a25f2d5a011a6c3764e590b320a9c185f70313c54"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.042648 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5bwr\" (UniqueName: \"kubernetes.io/projected/67324f1b-9004-416f-82a6-5758c069a111-kube-api-access-d5bwr\") pod \"olm-operator-6b444d44fb-2jj7w\" (UID: \"67324f1b-9004-416f-82a6-5758c069a111\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.065264 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gbgkl"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.068402 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" event={"ID":"834ba560-2a16-437b-8d57-20a0017ee78f","Type":"ContainerStarted","Data":"f8f61d6c2e69aa1bcea03cd973b81cae9bd47776760928e15913d8d5a3428731"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.073274 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.073862 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.573825139 +0000 UTC m=+143.365726385 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.074218 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" event={"ID":"1e55c467-ce3b-40a2-bfd7-eb5b8a7dd326","Type":"ContainerStarted","Data":"209b411cbd7a7d13036b0177f3444789d12f8fd94410ded4ae2d088bc8b0b189"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.078998 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.081935 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqzp8\" (UniqueName: \"kubernetes.io/projected/85b3072c-c7f7-41e4-a661-c8cdeb4cd811-kube-api-access-qqzp8\") pod \"migrator-59844c95c7-cwrtc\" (UID: \"85b3072c-c7f7-41e4-a661-c8cdeb4cd811\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.105323 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8rcx\" (UniqueName: \"kubernetes.io/projected/2e7ef368-53ce-42e6-85ca-251cd3dcdd15-kube-api-access-q8rcx\") pod \"machine-config-server-lqxsf\" (UID: \"2e7ef368-53ce-42e6-85ca-251cd3dcdd15\") " pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.108836 4721 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-xltgq container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" start-of-body= Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.108879 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" podUID="aa939e70-7c23-478e-9fca-ac0632a1295a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.118804 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" event={"ID":"aa939e70-7c23-478e-9fca-ac0632a1295a","Type":"ContainerStarted","Data":"972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.118850 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:19:14 crc kubenswrapper[4721]: W0130 21:19:14.127742 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95250d0b_0b56_458d_9476_0763808f1735.slice/crio-e8ab23e416f87679e1345ec1a650e5159c7dff963fe732e45045ce8904359d8e WatchSource:0}: Error finding container e8ab23e416f87679e1345ec1a650e5159c7dff963fe732e45045ce8904359d8e: Status 404 returned error can't find the container 
with id e8ab23e416f87679e1345ec1a650e5159c7dff963fe732e45045ce8904359d8e Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.127947 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4c8d2e38-94eb-4ad2-b288-c25e62ba07ee-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4rkzz\" (UID: \"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.128035 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-czsct" event={"ID":"c93c14cf-0b1f-419e-9282-d58e950c1e42","Type":"ContainerStarted","Data":"419abb4ae9303161e471dd63a243098279929dde7fe9746fff43505b7e70cf62"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.128507 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-czsct" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.131978 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" event={"ID":"9d5b2021-f707-45bf-9890-d0ec722cd52f","Type":"ContainerStarted","Data":"45be91227b8316c673b8c0028c6343161b27d9bbfcfc328217c217fd28684894"} Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.133843 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a39508b8-299a-40c7-872b-9cffd0d4ad11-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-m44q7\" (UID: \"a39508b8-299a-40c7-872b-9cffd0d4ad11\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.136982 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.138566 4721 patch_prober.go:28] interesting pod/console-operator-58897d9998-czsct container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.138648 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-czsct" podUID="c93c14cf-0b1f-419e-9282-d58e950c1e42" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.168068 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c98j2\" (UniqueName: \"kubernetes.io/projected/45d6565a-19c0-4077-a8b2-727014145e16-kube-api-access-c98j2\") pod \"ingress-canary-rknn2\" (UID: \"45d6565a-19c0-4077-a8b2-727014145e16\") " pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.175669 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.179100 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.679088558 +0000 UTC m=+143.470989794 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.183351 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.183789 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5txhq\" (UniqueName: \"kubernetes.io/projected/d298abde-996b-4c31-a26c-474a2b9e8742-kube-api-access-5txhq\") pod \"multus-admission-controller-857f4d67dd-rs9bk\" (UID: \"d298abde-996b-4c31-a26c-474a2b9e8742\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.193565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8mbf\" (UniqueName: \"kubernetes.io/projected/f6167606-771c-4d04-840f-17a8998a71a4-kube-api-access-p8mbf\") pod \"service-ca-operator-777779d784-smcr8\" (UID: \"f6167606-771c-4d04-840f-17a8998a71a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.199592 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.218019 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lqxsf" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.223278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-rknn2" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.243036 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tf54\" (UniqueName: \"kubernetes.io/projected/9557d61f-1030-4412-87ec-372c7b26f9b2-kube-api-access-9tf54\") pod \"catalog-operator-68c6474976-22jmb\" (UID: \"9557d61f-1030-4412-87ec-372c7b26f9b2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.244503 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.249410 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4764t\" (UniqueName: \"kubernetes.io/projected/d4cadf9a-d589-4023-9c1d-9c3d07d69bbc-kube-api-access-4764t\") pod \"service-ca-9c57cc56f-r4652\" (UID: \"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc\") " pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.249759 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-r4652" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.260169 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.261795 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvqmx\" (UniqueName: \"kubernetes.io/projected/4886a538-d346-47dc-a6cb-184ae7d015b9-kube-api-access-hvqmx\") pod \"kube-storage-version-migrator-operator-b67b599dd-mwswp\" (UID: \"4886a538-d346-47dc-a6cb-184ae7d015b9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.277107 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.278276 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.778256767 +0000 UTC m=+143.570158013 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.285926 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.290763 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw2sp\" (UniqueName: \"kubernetes.io/projected/364de1a3-22d0-4012-ac93-def721ef2851-kube-api-access-vw2sp\") pod \"machine-config-operator-74547568cd-m7b8l\" (UID: \"364de1a3-22d0-4012-ac93-def721ef2851\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.294091 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljqxv\" (UniqueName: \"kubernetes.io/projected/10335cae-c54e-4bf1-b41c-6df530ac47dc-kube-api-access-ljqxv\") pod \"marketplace-operator-79b997595-q9qsz\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.316945 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctlcc\" (UniqueName: \"kubernetes.io/projected/36b59aa6-8bde-4935-82ce-04ef6d8ec10c-kube-api-access-ctlcc\") pod \"control-plane-machine-set-operator-78cbb6b69f-m2qpz\" (UID: \"36b59aa6-8bde-4935-82ce-04ef6d8ec10c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.331856 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.343715 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc5qk\" (UniqueName: \"kubernetes.io/projected/57505fbf-a7f7-45ff-91bb-f3463567721e-kube-api-access-bc5qk\") pod \"collect-profiles-29496795-h48jk\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.380649 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.381427 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.881415527 +0000 UTC m=+143.673316773 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.392243 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.430243 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.441697 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.461120 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.467481 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.490736 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.494260 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:14.994206963 +0000 UTC m=+143.786108219 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.511670 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.543614 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.552508 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.563456 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.592193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.594682 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.094659444 +0000 UTC m=+143.886560690 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.692714 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.700459 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.700791 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.200773691 +0000 UTC m=+143.992674937 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.785452 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vhzw8"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.801619 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.802020 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.302004848 +0000 UTC m=+144.093906094 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: W0130 21:19:14.888644 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85b3072c_c7f7_41e4_a661_c8cdeb4cd811.slice/crio-8b78584deab579aafe7287a361637f027ddbf29a6a3105f9c3823df782cc048a WatchSource:0}: Error finding container 8b78584deab579aafe7287a361637f027ddbf29a6a3105f9c3823df782cc048a: Status 404 returned error can't find the container with id 8b78584deab579aafe7287a361637f027ddbf29a6a3105f9c3823df782cc048a Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.897615 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz"] Jan 30 21:19:14 crc kubenswrapper[4721]: I0130 21:19:14.903866 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:14 crc kubenswrapper[4721]: E0130 21:19:14.904404 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.404378492 +0000 UTC m=+144.196279738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:14 crc kubenswrapper[4721]: W0130 21:19:14.918194 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93862821_84b5_4736_b516_73b6b69064ff.slice/crio-a2e0d912253b5bd6bb464aa541e55d0034c303c3e81ce91c2c08ba04b74dd45c WatchSource:0}: Error finding container a2e0d912253b5bd6bb464aa541e55d0034c303c3e81ce91c2c08ba04b74dd45c: Status 404 returned error can't find the container with id a2e0d912253b5bd6bb464aa541e55d0034c303c3e81ce91c2c08ba04b74dd45c Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.014756 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.017029 4721 csr.go:261] certificate signing request csr-qmqvg is approved, waiting to be issued Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.017191 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.517155148 +0000 UTC m=+144.309056394 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.026657 4721 csr.go:257] certificate signing request csr-qmqvg is issued Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.056388 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rs9bk"] Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.120807 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.121233 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.621213317 +0000 UTC m=+144.413114563 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.181828 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-smcr8"] Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.226100 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" event={"ID":"c8c5d710-b5ec-4998-899c-14cac760d429","Type":"ContainerStarted","Data":"9bcfc42df5c4d630f73fd9b3585eca1781f3164a03c1b16db8dc8ac8a826779d"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.227105 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.227561 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.727546661 +0000 UTC m=+144.519447907 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.263883 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" event={"ID":"714b4cdb-f9a5-4d74-b46f-21e6167f1807","Type":"ContainerStarted","Data":"15ae335a7acd43a435d3b75f1fe5122d4712234626ea5dc335a464a1b550ad42"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.270648 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p6lgp" event={"ID":"5c12bf80-90a7-457e-ad04-e0b55d28e2bf","Type":"ContainerStarted","Data":"de33f405c5bc7fa93a60e67efff98085e4da9e2d9fc5c2af99db625ffcdf0b31"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.297559 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lqxsf" event={"ID":"2e7ef368-53ce-42e6-85ca-251cd3dcdd15","Type":"ContainerStarted","Data":"a8a8dbd582aae2bbf82f5102a2a47831b320af61431d7ae7303457467c68a8f6"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.307279 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" event={"ID":"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee","Type":"ContainerStarted","Data":"3b4deb20c1bc1254c9e935f964ec7520ea948bd0841cc4dc1736bae0c3fd5ca2"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.314098 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gbgkl" event={"ID":"95250d0b-0b56-458d-9476-0763808f1735","Type":"ContainerStarted","Data":"e8ab23e416f87679e1345ec1a650e5159c7dff963fe732e45045ce8904359d8e"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.325789 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" event={"ID":"85b3072c-c7f7-41e4-a661-c8cdeb4cd811","Type":"ContainerStarted","Data":"8b78584deab579aafe7287a361637f027ddbf29a6a3105f9c3823df782cc048a"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.340160 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.340667 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.840647858 +0000 UTC m=+144.632549104 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.369837 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" event={"ID":"0aa4d8b0-73fa-4173-903c-c397c58652ef","Type":"ContainerStarted","Data":"afd7436ab5c86df9f02ec5cd5e92030725fd1a1cbbe110963b2db0c92adf8f3c"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.397555 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" event={"ID":"0b7a01e4-06c5-425c-8e68-d1b774fe74d2","Type":"ContainerStarted","Data":"91ca3d888c199907d737ec9ccd0ae50a2a85141678929f43d2201183413f1a26"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.397605 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" event={"ID":"0b7a01e4-06c5-425c-8e68-d1b774fe74d2","Type":"ContainerStarted","Data":"4d98a9859608c5d5a4e9d5a0e6594665e21ae13ab0beeda654d2bf07e3e3b2ac"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.400846 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" event={"ID":"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773","Type":"ContainerStarted","Data":"eb9657901e9b5a600e7127c89c43a1e7d1db07812d92bc3baad8ab1cdd30dae5"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.401069 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" event={"ID":"d3a893c7-1f1c-48cb-b7bb-9f3a0ab23773","Type":"ContainerStarted","Data":"39d51fef8ef000a0acc7848a5cb5152e6aa27d822a7a49d11830b306426ea2f5"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.405086 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" event={"ID":"93862821-84b5-4736-b516-73b6b69064ff","Type":"ContainerStarted","Data":"a2e0d912253b5bd6bb464aa541e55d0034c303c3e81ce91c2c08ba04b74dd45c"} Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.405136 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.413082 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.413159 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.423701 4721 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.423786 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.441885 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.442656 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:15.942643779 +0000 UTC m=+144.734545025 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.544631 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.545684 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.045664865 +0000 UTC m=+144.837566111 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.646307 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.646905 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-30 21:19:16.146892131 +0000 UTC m=+144.938793377 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.747922 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.748837 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.248822232 +0000 UTC m=+145.040723478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.804635 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk" podStartSLOduration=122.80461795 podStartE2EDuration="2m2.80461795s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:15.799109059 +0000 UTC m=+144.591010305" watchObservedRunningTime="2026-01-30 21:19:15.80461795 +0000 UTC m=+144.596519196"
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.819533 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.848647 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4j4cn" podStartSLOduration=121.848631562 podStartE2EDuration="2m1.848631562s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:15.84093344 +0000 UTC m=+144.632834686" watchObservedRunningTime="2026-01-30 21:19:15.848631562 +0000 UTC m=+144.640532808"
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.850062 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.850471 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.350458522 +0000 UTC m=+145.142359768 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.970910 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-v7lbj" podStartSLOduration=122.970892707 podStartE2EDuration="2m2.970892707s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:15.970526866 +0000 UTC m=+144.762428112" watchObservedRunningTime="2026-01-30 21:19:15.970892707 +0000 UTC m=+144.762793953"
Jan 30 21:19:15 crc kubenswrapper[4721]: I0130 21:19:15.987171 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:15 crc kubenswrapper[4721]: E0130 21:19:15.987740 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.487724049 +0000 UTC m=+145.279625295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.032637 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lw9fr" podStartSLOduration=123.03262266 podStartE2EDuration="2m3.03262266s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.032151365 +0000 UTC m=+144.824052611" watchObservedRunningTime="2026-01-30 21:19:16.03262266 +0000 UTC m=+144.824523906"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.033267 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 21:19:16 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Jan 30 21:19:16 crc kubenswrapper[4721]: [+]process-running ok
Jan 30 21:19:16 crc kubenswrapper[4721]: healthz check failed
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.033337 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.033760 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-30 21:14:15 +0000 UTC, rotation deadline is 2026-11-14 19:15:28.787674138 +0000 UTC
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.033806 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6909h56m12.753870999s for next certificate rotation
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.066989 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.090219 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.090911 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.59089932 +0000 UTC m=+145.382800566 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.096207 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-gs7nh" podStartSLOduration=123.096183113 podStartE2EDuration="2m3.096183113s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.08264795 +0000 UTC m=+144.874549196" watchObservedRunningTime="2026-01-30 21:19:16.096183113 +0000 UTC m=+144.888084359"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.175610 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" podStartSLOduration=122.175596585 podStartE2EDuration="2m2.175596585s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.17512643 +0000 UTC m=+144.967027676" watchObservedRunningTime="2026-01-30 21:19:16.175596585 +0000 UTC m=+144.967497831"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.204693 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.205147 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.705132743 +0000 UTC m=+145.497033989 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.209512 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-czsct"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.227369 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-r4652"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.251671 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kh9xb" podStartSLOduration=123.251654447 podStartE2EDuration="2m3.251654447s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.243145898 +0000 UTC m=+145.035047144" watchObservedRunningTime="2026-01-30 21:19:16.251654447 +0000 UTC m=+145.043555693"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.262539 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz"]
Jan 30 21:19:16 crc kubenswrapper[4721]: W0130 21:19:16.292484 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4cadf9a_d589_4023_9c1d_9c3d07d69bbc.slice/crio-a3d422768adea6a6b92f042a61ed8cfb191db2edab31360bafb47906d3c9ed89 WatchSource:0}: Error finding container a3d422768adea6a6b92f042a61ed8cfb191db2edab31360bafb47906d3c9ed89: Status 404 returned error can't find the container with id a3d422768adea6a6b92f042a61ed8cfb191db2edab31360bafb47906d3c9ed89
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.305697 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w7pjl" podStartSLOduration=123.305679328 podStartE2EDuration="2m3.305679328s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.275131967 +0000 UTC m=+145.067033213" watchObservedRunningTime="2026-01-30 21:19:16.305679328 +0000 UTC m=+145.097580574"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.315165 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.315491 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.815480008 +0000 UTC m=+145.607381244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.315569 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q9qsz"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.316849 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-rknn2"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.325423 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-ktnwk" podStartSLOduration=123.325400564 podStartE2EDuration="2m3.325400564s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.315671915 +0000 UTC m=+145.107573161" watchObservedRunningTime="2026-01-30 21:19:16.325400564 +0000 UTC m=+145.117301810"
Jan 30 21:19:16 crc kubenswrapper[4721]: W0130 21:19:16.343920 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10335cae_c54e_4bf1_b41c_6df530ac47dc.slice/crio-72e0d63182060b674c4bff9078801338af160fd46676798be612bd9c1007a8e9 WatchSource:0}: Error finding container 72e0d63182060b674c4bff9078801338af160fd46676798be612bd9c1007a8e9: Status 404 returned error can't find the container with id 72e0d63182060b674c4bff9078801338af160fd46676798be612bd9c1007a8e9
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.363093 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-rhwvl" podStartSLOduration=122.363073008 podStartE2EDuration="2m2.363073008s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.362736187 +0000 UTC m=+145.154637433" watchObservedRunningTime="2026-01-30 21:19:16.363073008 +0000 UTC m=+145.154974254"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.391946 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-th4nz" podStartSLOduration=122.391927863 podStartE2EDuration="2m2.391927863s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.390125275 +0000 UTC m=+145.182026521" watchObservedRunningTime="2026-01-30 21:19:16.391927863 +0000 UTC m=+145.183829109"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.411764 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.417875 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.418254 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:16.918237896 +0000 UTC m=+145.710139142 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.433993 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8hxlg" podStartSLOduration=122.433977611 podStartE2EDuration="2m2.433977611s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.431886543 +0000 UTC m=+145.223787789" watchObservedRunningTime="2026-01-30 21:19:16.433977611 +0000 UTC m=+145.225878857"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.435866 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" event={"ID":"36b59aa6-8bde-4935-82ce-04ef6d8ec10c","Type":"ContainerStarted","Data":"ed1a26f17dead5a9aedc6cf51539ea143913c790e622d03c0de348ccf5379508"}
Jan 30 21:19:16 crc kubenswrapper[4721]: W0130 21:19:16.481811 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9557d61f_1030_4412_87ec_372c7b26f9b2.slice/crio-127f891f87df326274d677a1bf099770bc677a9d3e9e3674be423596e5dc3df9 WatchSource:0}: Error finding container 127f891f87df326274d677a1bf099770bc677a9d3e9e3674be423596e5dc3df9: Status 404 returned error can't find the container with id 127f891f87df326274d677a1bf099770bc677a9d3e9e3674be423596e5dc3df9
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.508701 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" event={"ID":"0aa4d8b0-73fa-4173-903c-c397c58652ef","Type":"ContainerStarted","Data":"4c8b0629632a13b04271ae9608f9bf99e62dc4c986cb6f41a318fdf98ae58e54"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.521353 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" event={"ID":"f6167606-771c-4d04-840f-17a8998a71a4","Type":"ContainerStarted","Data":"790070b2c3e2438eb42c7121198f96e34c18ae09077900274a72faf4ffd59622"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.521401 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" event={"ID":"f6167606-771c-4d04-840f-17a8998a71a4","Type":"ContainerStarted","Data":"22837b4b5407427b7408c476f3f1c75459fc19304a44557255f7a1828fc71b39"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.521532 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.521978 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.021963514 +0000 UTC m=+145.813864760 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.535054 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp" podStartSLOduration=122.535025792 podStartE2EDuration="2m2.535025792s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.521474718 +0000 UTC m=+145.313375964" watchObservedRunningTime="2026-01-30 21:19:16.535025792 +0000 UTC m=+145.326927038"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.542153 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lqxsf" event={"ID":"2e7ef368-53ce-42e6-85ca-251cd3dcdd15","Type":"ContainerStarted","Data":"97fd20980822e0aa9bbff1a76c3b2b9fc7acd70e1d669a7c9a6d68bbab5ed5b6"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.543458 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-xvtnr" podStartSLOduration=123.543448138 podStartE2EDuration="2m3.543448138s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.541879817 +0000 UTC m=+145.333781063" watchObservedRunningTime="2026-01-30 21:19:16.543448138 +0000 UTC m=+145.335349384"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.562472 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.623060 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2zn7w" podStartSLOduration=122.623038706 podStartE2EDuration="2m2.623038706s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.618707674 +0000 UTC m=+145.410608920" watchObservedRunningTime="2026-01-30 21:19:16.623038706 +0000 UTC m=+145.414939952"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.626207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" event={"ID":"4c8d2e38-94eb-4ad2-b288-c25e62ba07ee","Type":"ContainerStarted","Data":"4240b360761a2c5d3df908c6a9af746582fe5c44f8eff379a2e26a5a8f2882f8"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.627470 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.629759 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.129726785 +0000 UTC m=+145.921628031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.667459 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.669187 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gbgkl" event={"ID":"95250d0b-0b56-458d-9476-0763808f1735","Type":"ContainerStarted","Data":"324fb53adc04449f60ecac119e008d59f8a5fbcd2c0a2b559a531345ffef55dd"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.671071 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-gbgkl"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.671742 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.709254 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.709397 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.709619 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-czsct" podStartSLOduration=123.709610973 podStartE2EDuration="2m3.709610973s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.649326587 +0000 UTC m=+145.441227833" watchObservedRunningTime="2026-01-30 21:19:16.709610973 +0000 UTC m=+145.501512219"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.714565 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l"]
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.732940 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.734488 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.234473517 +0000 UTC m=+146.026374763 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.740267 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" podStartSLOduration=122.740248247 podStartE2EDuration="2m2.740248247s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.731927944 +0000 UTC m=+145.523829190" watchObservedRunningTime="2026-01-30 21:19:16.740248247 +0000 UTC m=+145.532149493"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.740329 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" event={"ID":"85b3072c-c7f7-41e4-a661-c8cdeb4cd811","Type":"ContainerStarted","Data":"23dff6ffcc5b2010b70b0ddb2f474636091c79e0db8033e95d5b176d9c601561"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.775061 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.775400 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.777777 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" event={"ID":"c8c5d710-b5ec-4998-899c-14cac760d429","Type":"ContainerStarted","Data":"b4a9ce0cac33858c98e03241f9f3e6f7aaa02ca14983998826479557e8fef7a4"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.782378 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" event={"ID":"714b4cdb-f9a5-4d74-b46f-21e6167f1807","Type":"ContainerStarted","Data":"abf3e8df9ad5b65cc79c81c7f78fc968666e7b9438c10ea3374885aaec9b644e"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.785419 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.796322 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" event={"ID":"d298abde-996b-4c31-a26c-474a2b9e8742","Type":"ContainerStarted","Data":"9c2562adda5cd1baf6cbfae80ca0312c16c7321c9beeeb8eeef1a7b067306cd1"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.796366 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" event={"ID":"d298abde-996b-4c31-a26c-474a2b9e8742","Type":"ContainerStarted","Data":"1bbc3bb7099553a70044dce31c9fec02bd71c8d080e8f03e5b8d9af0b81aadf3"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.801669 4721 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-d2jql container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" start-of-body=
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.801714 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" podUID="714b4cdb-f9a5-4d74-b46f-21e6167f1807" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.816771 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.830183 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" podStartSLOduration=123.830166313 podStartE2EDuration="2m3.830166313s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.82673119 +0000 UTC m=+145.618632436" watchObservedRunningTime="2026-01-30 21:19:16.830166313 +0000 UTC m=+145.622067569"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.834796 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.835671 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 21:19:16 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Jan 30 21:19:16 crc kubenswrapper[4721]: [+]process-running ok
Jan 30 21:19:16 crc kubenswrapper[4721]: healthz check failed
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.835715 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.835778 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.335760976 +0000 UTC m=+146.127662222 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.836859 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-rknn2" event={"ID":"45d6565a-19c0-4077-a8b2-727014145e16","Type":"ContainerStarted","Data":"0ac3d6625a33685badc94bc7b41d9f8f454694633f3a4e06c5d90c4fd6dc65a6"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.874221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" event={"ID":"0b7a01e4-06c5-425c-8e68-d1b774fe74d2","Type":"ContainerStarted","Data":"69b7fc17b23e3d23522b0996fa99222b356909a87a062bccb5d1755fb7b6e6e8"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.874689 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.884623 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-p6lgp" podStartSLOduration=122.884606406 podStartE2EDuration="2m2.884606406s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:16.878704243 +0000 UTC m=+145.670605489" watchObservedRunningTime="2026-01-30 21:19:16.884606406 +0000 UTC m=+145.676507652"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.912655 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" event={"ID":"10335cae-c54e-4bf1-b41c-6df530ac47dc","Type":"ContainerStarted","Data":"72e0d63182060b674c4bff9078801338af160fd46676798be612bd9c1007a8e9"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.929063 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-r4652" event={"ID":"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc","Type":"ContainerStarted","Data":"a3d422768adea6a6b92f042a61ed8cfb191db2edab31360bafb47906d3c9ed89"}
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.930393 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.930456 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.940378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:16 crc kubenswrapper[4721]: E0130 21:19:16.942574 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.442557325 +0000 UTC m=+146.234458581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:16 crc kubenswrapper[4721]: I0130 21:19:16.946561 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-28slp"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.014551 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-lqxsf" podStartSLOduration=6.014534684 podStartE2EDuration="6.014534684s" podCreationTimestamp="2026-01-30 21:19:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.0129171 +0000 UTC m=+145.804818346" watchObservedRunningTime="2026-01-30 21:19:17.014534684 +0000 UTC m=+145.806435930"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.041923 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.043010 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.542989536 +0000 UTC m=+146.334890782 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.077013 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" podStartSLOduration=123.07699911 podStartE2EDuration="2m3.07699911s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.075840053 +0000 UTC m=+145.867741289" watchObservedRunningTime="2026-01-30 21:19:17.07699911 +0000 UTC m=+145.868900346"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.101201 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" podStartSLOduration=123.101181383 podStartE2EDuration="2m3.101181383s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.100645865 +0000 UTC m=+145.892547111" watchObservedRunningTime="2026-01-30 21:19:17.101181383 +0000 UTC m=+145.893082629"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.126224 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4rkzz" podStartSLOduration=123.126204113 podStartE2EDuration="2m3.126204113s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.121507118 +0000 UTC m=+145.913408364" watchObservedRunningTime="2026-01-30 21:19:17.126204113 +0000 UTC m=+145.918105359"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.140650 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-nddl4"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.140866 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-nddl4"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.145021 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.145292 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.645281668 +0000 UTC m=+146.437182914 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.146826 4721 patch_prober.go:28] interesting pod/apiserver-76f77b778f-nddl4 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body=
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.146885 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" podUID="0aa4d8b0-73fa-4173-903c-c397c58652ef" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.225925 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-gbgkl" podStartSLOduration=6.225876668 podStartE2EDuration="6.225876668s" podCreationTimestamp="2026-01-30 21:19:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.220905976 +0000 UTC m=+146.012807222" watchObservedRunningTime="2026-01-30 21:19:17.225876668 +0000 UTC m=+146.017777914"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.245857 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.246233 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.746219205 +0000 UTC m=+146.538120441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.293151 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-smcr8" podStartSLOduration=123.293129782 podStartE2EDuration="2m3.293129782s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.262728566 +0000 UTC m=+146.054629812" watchObservedRunningTime="2026-01-30 21:19:17.293129782 +0000 UTC m=+146.085031028"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.322838 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" podStartSLOduration=123.322822255 podStartE2EDuration="2m3.322822255s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.322107572 +0000 UTC m=+146.114008818" watchObservedRunningTime="2026-01-30 21:19:17.322822255 +0000 UTC m=+146.114723501"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.323038 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" podStartSLOduration=123.323034442 podStartE2EDuration="2m3.323034442s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.287597031 +0000 UTC m=+146.079498277" watchObservedRunningTime="2026-01-30 21:19:17.323034442 +0000 UTC m=+146.114935688"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.347512 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.347758 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.847747032 +0000 UTC m=+146.639648278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.349418 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" podStartSLOduration=123.349410036 podStartE2EDuration="2m3.349410036s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.34799395 +0000 UTC m=+146.139895196" watchObservedRunningTime="2026-01-30 21:19:17.349410036 +0000 UTC m=+146.141311282"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.448954 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.449739 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:17.949724153 +0000 UTC m=+146.741625399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.550601 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.550943 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.050930459 +0000 UTC m=+146.842831705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.651921 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.652321 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.15226801 +0000 UTC m=+146.944169256 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.652732 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.653183 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.153165589 +0000 UTC m=+146.945066835 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.753910 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.754227 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.254212709 +0000 UTC m=+147.046113955 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.754618 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.754921 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.254912973 +0000 UTC m=+147.046814219 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.812454 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 21:19:17 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Jan 30 21:19:17 crc kubenswrapper[4721]: [+]process-running ok
Jan 30 21:19:17 crc kubenswrapper[4721]: healthz check failed
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.812968 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.830603 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-w99lk"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.856675 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.856833 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.356812162 +0000 UTC m=+147.148713408 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.857725 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.858214 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.358201437 +0000 UTC m=+147.150102693 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.937963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gbgkl" event={"ID":"95250d0b-0b56-458d-9476-0763808f1735","Type":"ContainerStarted","Data":"1de7c9e7942e4804a11da9c8a68872c8f5c3dc3c76eb369aec20731647b04da1"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.940031 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" event={"ID":"36b59aa6-8bde-4935-82ce-04ef6d8ec10c","Type":"ContainerStarted","Data":"0dd220ec5e7a037a2109c2ba41dea8b44b164849b4ee9b7e01cce3ca96486824"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.943413 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-r4652" event={"ID":"d4cadf9a-d589-4023-9c1d-9c3d07d69bbc","Type":"ContainerStarted","Data":"741a029809c98be825c84e2f688c13587fe887d473f56327279cf044ecc5c31b"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.945828 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cwrtc" event={"ID":"85b3072c-c7f7-41e4-a661-c8cdeb4cd811","Type":"ContainerStarted","Data":"58428750f9721b1ecaf51c3238fa2568f04ba31221b13269d667e20406516b52"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.948607 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" event={"ID":"6a1a4e32-9874-4126-94e1-b741a6bd20c1","Type":"ContainerStarted","Data":"31575c23292dca59cc1ba8a73e65eca488128ea15f8f4db4eb8c3c6498333025"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.948776 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" event={"ID":"6a1a4e32-9874-4126-94e1-b741a6bd20c1","Type":"ContainerStarted","Data":"6a70803a9f7d65ce27e898fef936cca09586ff3886ee16cd2a196a7bf3d4e073"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.948862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" event={"ID":"6a1a4e32-9874-4126-94e1-b741a6bd20c1","Type":"ContainerStarted","Data":"569eefd20809c6126f741052b3bf1605a2d4453bbf03004caea0f01cd480b55c"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.950498 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" event={"ID":"93862821-84b5-4736-b516-73b6b69064ff","Type":"ContainerStarted","Data":"7dd36dbd26a5dfd9a7e107006965e4245a7bcb41854748c8baf4809fd3dfffc1"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.952416 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" event={"ID":"10335cae-c54e-4bf1-b41c-6df530ac47dc","Type":"ContainerStarted","Data":"9ffcd0dc728edff83277dc59541f964354d90506eab04b9aa28c29eaa992426b"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.953174 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.954944 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-q9qsz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body=
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.954985 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused"
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.955832 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w55t" event={"ID":"c8c5d710-b5ec-4998-899c-14cac760d429","Type":"ContainerStarted","Data":"57eed821c46bf09ceca7c4adcb22e64edfeada181aaf30cba35aa967afe51282"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.957914 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" event={"ID":"d298abde-996b-4c31-a26c-474a2b9e8742","Type":"ContainerStarted","Data":"22d498acacbc222a5125f38908ae9fe3266ed4f01f4b8c1c3685fa30e39dbf89"}
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.959154 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.959490 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.459466305 +0000 UTC m=+147.251367551 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.959530 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:17 crc kubenswrapper[4721]: E0130 21:19:17.959990 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed.
No retries permitted until 2026-01-30 21:19:18.459975832 +0000 UTC m=+147.251877248 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.961607 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" event={"ID":"4886a538-d346-47dc-a6cb-184ae7d015b9","Type":"ContainerStarted","Data":"dc44dd8626e6cd78611087447aabd9aa967e29cbcce2e809c8dae2db9e09b6d4"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.961654 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" event={"ID":"4886a538-d346-47dc-a6cb-184ae7d015b9","Type":"ContainerStarted","Data":"1a0f726ce1a6c4ed25071f844ea4e0473195921bede305d8f8304c83ed85375b"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.964465 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" event={"ID":"9557d61f-1030-4412-87ec-372c7b26f9b2","Type":"ContainerStarted","Data":"e518d2db0f963bb7c2a2ac938d36c4d8bce9517c2972a79690eb150d1791f324"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.964515 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" event={"ID":"9557d61f-1030-4412-87ec-372c7b26f9b2","Type":"ContainerStarted","Data":"127f891f87df326274d677a1bf099770bc677a9d3e9e3674be423596e5dc3df9"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.964855 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.966407 4721 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-22jmb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.966445 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" podUID="9557d61f-1030-4412-87ec-372c7b26f9b2" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.967820 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-rknn2" event={"ID":"45d6565a-19c0-4077-a8b2-727014145e16","Type":"ContainerStarted","Data":"27babeee5e71955ed9feaccf8dd68df47af41bfa37876b14bbfbc3532291ad0c"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.970269 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" 
event={"ID":"364de1a3-22d0-4012-ac93-def721ef2851","Type":"ContainerStarted","Data":"dad5bd3b3a90d5e5af9607934db3b016490151a6cd1f527acae35db58fe9f92a"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.970311 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" event={"ID":"364de1a3-22d0-4012-ac93-def721ef2851","Type":"ContainerStarted","Data":"494497db0678286ae0012a53b14bba431c2274feac874ada21cd913923da77cd"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.970322 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" event={"ID":"364de1a3-22d0-4012-ac93-def721ef2851","Type":"ContainerStarted","Data":"e601e5978053434ebdf9879b2252dd612f4fd431da0884125939d9ce268c4c0b"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.972376 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" event={"ID":"a39508b8-299a-40c7-872b-9cffd0d4ad11","Type":"ContainerStarted","Data":"b3e99e0ee727db6b582e3e3bf76c7224aa58da5ae408f8509a3fdfd8a29a6e33"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.972406 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" event={"ID":"a39508b8-299a-40c7-872b-9cffd0d4ad11","Type":"ContainerStarted","Data":"06358b87db54717793b2f02bf515b9863bea3827011a7004b0f53ada6e44a93c"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.973814 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" event={"ID":"57505fbf-a7f7-45ff-91bb-f3463567721e","Type":"ContainerStarted","Data":"87f6096ead028af7ae8bd76949059a133211f3079c85533a9ff2fc619b0437df"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.973841 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" event={"ID":"57505fbf-a7f7-45ff-91bb-f3463567721e","Type":"ContainerStarted","Data":"f10b5e375a68ba2ea6df87361038b97af9adf65fcdaf26b069c4126c2c5d2243"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.978168 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" event={"ID":"67324f1b-9004-416f-82a6-5758c069a111","Type":"ContainerStarted","Data":"871cb114e5903d1651c5ca71d40bdd99f3fe665f0d4e8236f2636dd2b7b60355"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.978218 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" event={"ID":"67324f1b-9004-416f-82a6-5758c069a111","Type":"ContainerStarted","Data":"f85a8dfe4a1f8c8b204565f50f5a5af89e78b334e733fdb5cde2481e36766f4e"} Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.980173 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:17 crc kubenswrapper[4721]: I0130 21:19:17.981597 4721 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-2jj7w container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Jan 30 21:19:17 crc kubenswrapper[4721]: 
I0130 21:19:17.981646 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" podUID="67324f1b-9004-416f-82a6-5758c069a111" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.016589 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m2qpz" podStartSLOduration=124.016571037 podStartE2EDuration="2m4.016571037s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:17.974753626 +0000 UTC m=+146.766654872" watchObservedRunningTime="2026-01-30 21:19:18.016571037 +0000 UTC m=+146.808472283" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.017686 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-chc5l" podStartSLOduration=124.017678542 podStartE2EDuration="2m4.017678542s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.014960263 +0000 UTC m=+146.806861509" watchObservedRunningTime="2026-01-30 21:19:18.017678542 +0000 UTC m=+146.809579788" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.060888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.063309 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.563273186 +0000 UTC m=+147.355174432 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.084945 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-rs9bk" podStartSLOduration=124.084931256 podStartE2EDuration="2m4.084931256s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.044412059 +0000 UTC m=+146.836313305" watchObservedRunningTime="2026-01-30 21:19:18.084931256 +0000 UTC m=+146.876832502" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.085643 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-rknn2" podStartSLOduration=7.085639809 podStartE2EDuration="7.085639809s" podCreationTimestamp="2026-01-30 21:19:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.083877042 +0000 UTC m=+146.875778288" watchObservedRunningTime="2026-01-30 21:19:18.085639809 +0000 UTC m=+146.877541055" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.135597 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-r4652" podStartSLOduration=124.135580125 podStartE2EDuration="2m4.135580125s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.130557301 +0000 UTC m=+146.922458547" watchObservedRunningTime="2026-01-30 21:19:18.135580125 +0000 UTC m=+146.927481371" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.153255 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m7b8l" podStartSLOduration=124.153235574 podStartE2EDuration="2m4.153235574s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.152608544 +0000 UTC m=+146.944509790" watchObservedRunningTime="2026-01-30 21:19:18.153235574 +0000 UTC m=+146.945136820" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.164694 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.166409 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-30 21:19:18.666396925 +0000 UTC m=+147.458298171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.266837 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.267191 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.767176167 +0000 UTC m=+147.559077413 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.284961 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mwswp" podStartSLOduration=124.28494482 podStartE2EDuration="2m4.28494482s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.282376486 +0000 UTC m=+147.074277732" watchObservedRunningTime="2026-01-30 21:19:18.28494482 +0000 UTC m=+147.076846066" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.341513 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" podStartSLOduration=124.341495073 podStartE2EDuration="2m4.341495073s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.340629924 +0000 UTC m=+147.132531170" watchObservedRunningTime="2026-01-30 21:19:18.341495073 +0000 UTC m=+147.133396319" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.370986 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 
21:19:18.371357 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.871343941 +0000 UTC m=+147.663245187 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.400645 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" podStartSLOduration=125.400628861 podStartE2EDuration="2m5.400628861s" podCreationTimestamp="2026-01-30 21:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.397673554 +0000 UTC m=+147.189574800" watchObservedRunningTime="2026-01-30 21:19:18.400628861 +0000 UTC m=+147.192530107" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.434830 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-m44q7" podStartSLOduration=124.43481157 podStartE2EDuration="2m4.43481157s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.432694551 +0000 UTC m=+147.224595797" watchObservedRunningTime="2026-01-30 21:19:18.43481157 +0000 UTC m=+147.226712816" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.471957 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.472190 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:18.972176805 +0000 UTC m=+147.764078051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.513924 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" podStartSLOduration=124.513902702 podStartE2EDuration="2m4.513902702s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.476523867 +0000 UTC m=+147.268425113" watchObservedRunningTime="2026-01-30 21:19:18.513902702 +0000 UTC m=+147.305803948" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.514865 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" podStartSLOduration=124.514858683 podStartE2EDuration="2m4.514858683s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:18.50835417 +0000 UTC m=+147.300255416" watchObservedRunningTime="2026-01-30 21:19:18.514858683 +0000 UTC m=+147.306759929" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.573734 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.574135 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.074123485 +0000 UTC m=+147.866024731 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.674888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.675093 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.175065433 +0000 UTC m=+147.966966679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.675323 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.675629 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.1756159 +0000 UTC m=+147.967517146 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.776181 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.776319 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.276291339 +0000 UTC m=+148.068192585 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.776450 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.776745 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.276737524 +0000 UTC m=+148.068638770 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.815142 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 21:19:18 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Jan 30 21:19:18 crc kubenswrapper[4721]: [+]process-running ok Jan 30 21:19:18 crc kubenswrapper[4721]: healthz check failed Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.815216 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.877148 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.877368 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.37733749 +0000 UTC m=+148.169238736 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.877472 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.877809 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.377800305 +0000 UTC m=+148.169701551 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.978596 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.978736 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.478704741 +0000 UTC m=+148.270605977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.978793 4721 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-d2jql container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.978826 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" podUID="714b4cdb-f9a5-4d74-b46f-21e6167f1807" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:19:18 crc kubenswrapper[4721]: I0130 21:19:18.978862 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:18 crc kubenswrapper[4721]: E0130 21:19:18.979181 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.479170487 +0000 UTC m=+148.271071723 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.001742 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-q9qsz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.001797 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.019471 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d2jql" Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.022182 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-2jj7w" Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.035214 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-22jmb" Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.079873 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.080073 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.580045042 +0000 UTC m=+148.371946288 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.080635 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.082009 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.581997446 +0000 UTC m=+148.373898692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.182917 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.183169 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.683118879 +0000 UTC m=+148.475020125 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.183392 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.183722 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.683709129 +0000 UTC m=+148.475610375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.284953 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.285141 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.785113291 +0000 UTC m=+148.577014537 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.285329 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.285712 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.78569656 +0000 UTC m=+148.577597806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.386858 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.387199 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.887172195 +0000 UTC m=+148.679073441 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.488734 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.489069 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:19.989057054 +0000 UTC m=+148.780958300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.589755 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.589923 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.089900618 +0000 UTC m=+148.881801864 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.590183 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.590510 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.090500758 +0000 UTC m=+148.882402194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.691443 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.692250 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.19223253 +0000 UTC m=+148.984133776 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.798066 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.798464 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.298450341 +0000 UTC m=+149.090351587 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.813424 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 21:19:19 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Jan 30 21:19:19 crc kubenswrapper[4721]: [+]process-running ok
Jan 30 21:19:19 crc kubenswrapper[4721]: healthz check failed
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.813657 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.899165 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.899479 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.39945212 +0000 UTC m=+149.191353366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.900358 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:19 crc kubenswrapper[4721]: E0130 21:19:19.904520 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.404502876 +0000 UTC m=+149.196404122 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:19 crc kubenswrapper[4721]: I0130 21:19:19.965710 4721 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.005196 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.005532 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.005646 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.006636 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 30 21:19:20 crc kubenswrapper[4721]: E0130 21:19:20.006686 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.506662353 +0000 UTC m=+149.298563599 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.006862 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.007268 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.010068 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.010371 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.022264 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.025073 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.040415 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" event={"ID":"93862821-84b5-4736-b516-73b6b69064ff","Type":"ContainerStarted","Data":"240793f0d893ad669e2f4cb481d0960a218ccbf0af99517b1af5248de3c97e2b"}
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.040606 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" event={"ID":"93862821-84b5-4736-b516-73b6b69064ff","Type":"ContainerStarted","Data":"4b382dcde6f119ab4fd098aff67d147d6c2d83d0b63b230737960c1ebce21300"}
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.040669 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" event={"ID":"93862821-84b5-4736-b516-73b6b69064ff","Type":"ContainerStarted","Data":"b1917a0c488430975907df2db6c5a96e0185e4c624f5fae1e4163bd8f140154e"}
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.062180 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-vhzw8" podStartSLOduration=9.062162132 podStartE2EDuration="9.062162132s" podCreationTimestamp="2026-01-30 21:19:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:20.059505765 +0000 UTC m=+148.851407011" watchObservedRunningTime="2026-01-30 21:19:20.062162132 +0000 UTC m=+148.854063378"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.107102 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.107145 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.107189 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d326199d-f87c-4586-911f-f9dcbb7db01a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.107241 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d326199d-f87c-4586-911f-f9dcbb7db01a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.107367 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:19:20 crc kubenswrapper[4721]: E0130 21:19:20.109522 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.609509213 +0000 UTC m=+149.401410459 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.111389 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.113272 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.208123 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:20 crc kubenswrapper[4721]: E0130 21:19:20.208346 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.708318471 +0000 UTC m=+149.500219717 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.208717 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.208758 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d326199d-f87c-4586-911f-f9dcbb7db01a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.208785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d326199d-f87c-4586-911f-f9dcbb7db01a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.208859 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d326199d-f87c-4586-911f-f9dcbb7db01a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: E0130 21:19:20.209055 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 21:19:20.709047665 +0000 UTC m=+149.500948911 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-cm92z" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.227571 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d326199d-f87c-4586-911f-f9dcbb7db01a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.267491 4721 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-30T21:19:19.965739483Z","Handler":null,"Name":""}
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.276043 4721 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.276100 4721 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.310269 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.314892 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.319022 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.335483 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.340004 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
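The retry storm above resolves itself: every MountVolume/UnmountVolume attempt against pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 is requeued with a fixed 500 ms delay (durationBeforeRetry 500ms) because kubevirt.io.hostpath-provisioner is not yet in the kubelet's list of registered CSI drivers; once plugin_watcher.go picks up the registration socket at 21:19:19.965710 and csi_plugin.go validates and registers the driver at 21:19:20.276, the pending operations succeed on their next pass. Below is a minimal Go sketch of that gate, with hypothetical names (driverRegistry, mountWithRetry) and a fixed 500 ms delay as seen in these records; it illustrates the pattern only and is not the kubelet's actual nestedpendingoperations code.

// Hypothetical sketch: operations against an unregistered CSI driver are
// requeued on a fixed delay instead of failing the pod permanently.
package main

import (
	"fmt"
	"sync"
	"time"
)

type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> endpoint socket
}

func (r *driverRegistry) lookup(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		// Mirrors the log's failure mode: "driver name ... not found in
		// the list of registered CSI drivers".
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

func mountWithRetry(reg *driverRegistry, driver string) {
	const retryAfter = 500 * time.Millisecond // durationBeforeRetry in the log
	for {
		if ep, err := reg.lookup(driver); err == nil {
			fmt.Println("MountDevice can proceed via", ep)
			return
		}
		time.Sleep(retryAfter) // "No retries permitted until" now + 500ms
	}
}

func main() {
	reg := &driverRegistry{drivers: map[string]string{}}
	go func() {
		// The driver registers about a second after the first failures,
		// as in the log (21:19:19.59 -> 21:19:20.276).
		time.Sleep(time.Second)
		reg.mu.Lock()
		reg.drivers["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/csi-hostpath/csi.sock"
		reg.mu.Unlock()
	}()
	mountWithRetry(reg, "kubevirt.io.hostpath-provisioner")
}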
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.411868 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.421052 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.421100 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.516825 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-cm92z\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.536785 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.537072 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9mhm6"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.538009 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.541911 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.552521 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mhm6"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.615954 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-utilities\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.616002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-catalog-content\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.616058 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhlnt\" (UniqueName: \"kubernetes.io/projected/315507aa-1e32-4360-b5b0-aa3625a10b0b-kube-api-access-qhlnt\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.717131 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-catalog-content\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.717196 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhlnt\" (UniqueName: \"kubernetes.io/projected/315507aa-1e32-4360-b5b0-aa3625a10b0b-kube-api-access-qhlnt\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.717256 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-utilities\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.717839 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-utilities\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.717835 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-catalog-content\") pod \"community-operators-9mhm6\" (UID: 
\"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.733067 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gsd7h"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.734326 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.736428 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.752200 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhlnt\" (UniqueName: \"kubernetes.io/projected/315507aa-1e32-4360-b5b0-aa3625a10b0b-kube-api-access-qhlnt\") pod \"community-operators-9mhm6\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.754648 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gsd7h"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.813556 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 21:19:20 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Jan 30 21:19:20 crc kubenswrapper[4721]: [+]process-running ok Jan 30 21:19:20 crc kubenswrapper[4721]: healthz check failed Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.813669 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.819373 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-catalog-content\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.819507 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-utilities\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.819568 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwmh5\" (UniqueName: \"kubernetes.io/projected/d263bfe7-d31b-445d-933f-2e1bc58a8e26-kube-api-access-dwmh5\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.843626 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cm92z"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.868103 4721 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.922206 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-catalog-content\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.922271 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-utilities\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.922324 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwmh5\" (UniqueName: \"kubernetes.io/projected/d263bfe7-d31b-445d-933f-2e1bc58a8e26-kube-api-access-dwmh5\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.922734 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-catalog-content\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.923180 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-utilities\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.955683 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wrlqs"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.967835 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.967939 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.969691 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwmh5\" (UniqueName: \"kubernetes.io/projected/d263bfe7-d31b-445d-933f-2e1bc58a8e26-kube-api-access-dwmh5\") pod \"certified-operators-gsd7h\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:20 crc kubenswrapper[4721]: I0130 21:19:20.983834 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wrlqs"] Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.024380 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-utilities\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.024423 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj5cj\" (UniqueName: \"kubernetes.io/projected/ecc44241-f566-4099-b6cf-adf0420a14f1-kube-api-access-gj5cj\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.024445 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-catalog-content\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.062898 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.095486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d326199d-f87c-4586-911f-f9dcbb7db01a","Type":"ContainerStarted","Data":"fd02cc0be69567cdb091c2624ded8e969b990107537301300c8affe337500bdb"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.108412 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"578f7a13cb906469c86b927c4058208c3483a472d580db679f55f986922d84a4"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.112127 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ed30432157d5ddce9815088d8dd9a185b862406ce76245f27984a1e7954f83d2"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.115354 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0e0e2c5fe6a09068b9b8111368eb3bf11401a171a0cee2434576b45cafc9d017"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.115379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"06521f623304c1c1a584e026c15f790024ca026365b0ff2d506f2f4ae3c01fcf"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.122171 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" event={"ID":"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70","Type":"ContainerStarted","Data":"b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.122213 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" event={"ID":"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70","Type":"ContainerStarted","Data":"e8c125e72a60e01090d3f9dbae2164e53d4bf91be52172761e04f592d3fa20d0"} Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.124254 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.126663 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj5cj\" (UniqueName: \"kubernetes.io/projected/ecc44241-f566-4099-b6cf-adf0420a14f1-kube-api-access-gj5cj\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.126700 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-catalog-content\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.126779 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-utilities\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.127185 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-utilities\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.127402 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-catalog-content\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.135939 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-42958"] Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.140420 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.158886 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42958"] Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.169252 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj5cj\" (UniqueName: \"kubernetes.io/projected/ecc44241-f566-4099-b6cf-adf0420a14f1-kube-api-access-gj5cj\") pod \"community-operators-wrlqs\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.170001 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" podStartSLOduration=127.169963759 podStartE2EDuration="2m7.169963759s" podCreationTimestamp="2026-01-30 21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:19:21.168401879 +0000 UTC m=+149.960303125" watchObservedRunningTime="2026-01-30 21:19:21.169963759 +0000 UTC m=+149.961865005" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.228050 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-utilities\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.228844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5br7v\" (UniqueName: \"kubernetes.io/projected/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-kube-api-access-5br7v\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.229044 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-catalog-content\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.297936 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.333637 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5br7v\" (UniqueName: \"kubernetes.io/projected/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-kube-api-access-5br7v\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.333690 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-catalog-content\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.333751 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-utilities\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.334507 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-utilities\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.334659 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-catalog-content\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.354906 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5br7v\" (UniqueName: \"kubernetes.io/projected/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-kube-api-access-5br7v\") pod \"certified-operators-42958\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.373676 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mhm6"] Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.386861 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gsd7h"] Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.476662 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.576909 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.577431 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.578203 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wrlqs"] Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.579216 4721 patch_prober.go:28] interesting pod/console-f9d7485db-xvtnr container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.579272 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xvtnr" podUID="715d346b-ba37-4920-a27b-5f9ef61133ef" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.708625 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42958"] Jan 30 21:19:21 crc kubenswrapper[4721]: W0130 21:19:21.766172 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc6f52ce_e313_4e62_8cd0_292c19d3cbc5.slice/crio-865a730c88deb77807452c52ca260553a1f2f5cea536a223f53d77e7a71fce54 WatchSource:0}: Error finding container 865a730c88deb77807452c52ca260553a1f2f5cea536a223f53d77e7a71fce54: Status 404 returned error can't find the container with id 865a730c88deb77807452c52ca260553a1f2f5cea536a223f53d77e7a71fce54 Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.822673 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 21:19:21 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Jan 30 21:19:21 crc kubenswrapper[4721]: [+]process-running ok Jan 30 21:19:21 crc kubenswrapper[4721]: healthz check failed Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.822725 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.914595 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.914670 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 30 
21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.914700 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 30 21:19:21 crc kubenswrapper[4721]: I0130 21:19:21.914729 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.101480 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.137510 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"24027ebe2e8eaa718888a83bcde7d14d0e7ecdcdeccb131cb5a5e3fd910fd863"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.142219 4721 generic.go:334] "Generic (PLEG): container finished" podID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerID="46afa88203cf92c9c000901f08577d6b749b4593a6c2df120aa2ce2dcb95d854" exitCode=0 Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.142464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42958" event={"ID":"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5","Type":"ContainerDied","Data":"46afa88203cf92c9c000901f08577d6b749b4593a6c2df120aa2ce2dcb95d854"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.142525 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42958" event={"ID":"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5","Type":"ContainerStarted","Data":"865a730c88deb77807452c52ca260553a1f2f5cea536a223f53d77e7a71fce54"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.145489 4721 generic.go:334] "Generic (PLEG): container finished" podID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerID="2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e" exitCode=0 Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.145802 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerDied","Data":"2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.145830 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerStarted","Data":"168a3ae603142cbb35c574b84355569e0c493e1421862a3c80bdde71d57494c3"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.147079 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.147863 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.149546 4721 generic.go:334] "Generic (PLEG): container 
finished" podID="d326199d-f87c-4586-911f-f9dcbb7db01a" containerID="a6dc3b9ff51c30aa926b361367b7515b32124bef83bed2d8dfe177ee52626f2f" exitCode=0 Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.149927 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d326199d-f87c-4586-911f-f9dcbb7db01a","Type":"ContainerDied","Data":"a6dc3b9ff51c30aa926b361367b7515b32124bef83bed2d8dfe177ee52626f2f"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.152501 4721 generic.go:334] "Generic (PLEG): container finished" podID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerID="16f98f16036e7786720e86e5ce770dc12843f8a68367d429379431c90461cfac" exitCode=0 Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.152557 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsd7h" event={"ID":"d263bfe7-d31b-445d-933f-2e1bc58a8e26","Type":"ContainerDied","Data":"16f98f16036e7786720e86e5ce770dc12843f8a68367d429379431c90461cfac"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.152579 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsd7h" event={"ID":"d263bfe7-d31b-445d-933f-2e1bc58a8e26","Type":"ContainerStarted","Data":"269c88f1a3a5e5ef043ed953eaebe60648d0dee9201b766f54afd895fb4f52f7"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.156475 4721 generic.go:334] "Generic (PLEG): container finished" podID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerID="df1458580e6c0d0c5af90a86d35bbca3c9e9cad4049beb81a85d67c536d1ea8a" exitCode=0 Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.157322 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerDied","Data":"df1458580e6c0d0c5af90a86d35bbca3c9e9cad4049beb81a85d67c536d1ea8a"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.157353 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerStarted","Data":"d13cbadd24d9d7eeab181772ec1a618abca3564d9e378a7b0ffdd52488393106"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.158215 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-nddl4" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.161183 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"db2c5c7888d63693f76babb33bad4fd5bf9d39dcbd8c06e6d27a7dd709f58224"} Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.738156 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bq7"] Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.739145 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.741487 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.752820 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-catalog-content\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.753014 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-utilities\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.753049 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-498hr\" (UniqueName: \"kubernetes.io/projected/3179d84c-16bd-405b-ac42-38a710f7a713-kube-api-access-498hr\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.755594 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bq7"] Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.816192 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 21:19:22 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Jan 30 21:19:22 crc kubenswrapper[4721]: [+]process-running ok Jan 30 21:19:22 crc kubenswrapper[4721]: healthz check failed Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.816279 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.860158 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-utilities\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.860218 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-498hr\" (UniqueName: \"kubernetes.io/projected/3179d84c-16bd-405b-ac42-38a710f7a713-kube-api-access-498hr\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.860307 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-catalog-content\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.860853 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-catalog-content\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.861087 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-utilities\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:22 crc kubenswrapper[4721]: I0130 21:19:22.887030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-498hr\" (UniqueName: \"kubernetes.io/projected/3179d84c-16bd-405b-ac42-38a710f7a713-kube-api-access-498hr\") pod \"redhat-marketplace-w4bq7\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.055113 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.132020 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-855fx"] Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.145858 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-855fx"] Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.146010 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.165864 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-catalog-content\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.165984 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-utilities\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.166073 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw2fp\" (UniqueName: \"kubernetes.io/projected/60ea230f-98d6-4955-bf6a-71a91d65ff20-kube-api-access-jw2fp\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.179009 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.267989 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-utilities\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.268071 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw2fp\" (UniqueName: \"kubernetes.io/projected/60ea230f-98d6-4955-bf6a-71a91d65ff20-kube-api-access-jw2fp\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.268138 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-catalog-content\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.268679 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-utilities\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.268813 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-catalog-content\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.314933 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw2fp\" (UniqueName: \"kubernetes.io/projected/60ea230f-98d6-4955-bf6a-71a91d65ff20-kube-api-access-jw2fp\") pod \"redhat-marketplace-855fx\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.442995 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.569650 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-855fx"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.571220 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d326199d-f87c-4586-911f-f9dcbb7db01a-kube-api-access\") pod \"d326199d-f87c-4586-911f-f9dcbb7db01a\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") "
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.571319 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d326199d-f87c-4586-911f-f9dcbb7db01a-kubelet-dir\") pod \"d326199d-f87c-4586-911f-f9dcbb7db01a\" (UID: \"d326199d-f87c-4586-911f-f9dcbb7db01a\") "
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.571543 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d326199d-f87c-4586-911f-f9dcbb7db01a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d326199d-f87c-4586-911f-f9dcbb7db01a" (UID: "d326199d-f87c-4586-911f-f9dcbb7db01a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.576439 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d326199d-f87c-4586-911f-f9dcbb7db01a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d326199d-f87c-4586-911f-f9dcbb7db01a" (UID: "d326199d-f87c-4586-911f-f9dcbb7db01a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.644754 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bq7"]
Jan 30 21:19:23 crc kubenswrapper[4721]: W0130 21:19:23.650969 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3179d84c_16bd_405b_ac42_38a710f7a713.slice/crio-2c1caf58d2eef99f9fc6e0e188aac3053f444f732c3d570bef2eb8086b71cd9f WatchSource:0}: Error finding container 2c1caf58d2eef99f9fc6e0e188aac3053f444f732c3d570bef2eb8086b71cd9f: Status 404 returned error can't find the container with id 2c1caf58d2eef99f9fc6e0e188aac3053f444f732c3d570bef2eb8086b71cd9f
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.673706 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d326199d-f87c-4586-911f-f9dcbb7db01a-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.673743 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d326199d-f87c-4586-911f-f9dcbb7db01a-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.733507 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b48x7"]
Jan 30 21:19:23 crc kubenswrapper[4721]: E0130 21:19:23.733790 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d326199d-f87c-4586-911f-f9dcbb7db01a" containerName="pruner"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.733810 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d326199d-f87c-4586-911f-f9dcbb7db01a" containerName="pruner"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.733902 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d326199d-f87c-4586-911f-f9dcbb7db01a" containerName="pruner"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.737595 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b48x7"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.739646 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.743837 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b48x7"]
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.808543 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-p6lgp"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.813105 4721 patch_prober.go:28] interesting pod/router-default-5444994796-p6lgp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 21:19:23 crc kubenswrapper[4721]: [+]has-synced ok
Jan 30 21:19:23 crc kubenswrapper[4721]: [+]process-running ok
Jan 30 21:19:23 crc kubenswrapper[4721]: healthz check failed
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.813724 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p6lgp" podUID="5c12bf80-90a7-457e-ad04-e0b55d28e2bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.876833 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.877951 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tctzl\" (UniqueName: \"kubernetes.io/projected/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-kube-api-access-tctzl\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.877978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-utilities\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.979005 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tctzl\" (UniqueName: \"kubernetes.io/projected/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-kube-api-access-tctzl\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.979051 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-utilities\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7"
Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.979159 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7"
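The router probe output above shows the aggregated healthz format used across Kubernetes components: one "[+]name ok" or "[-]name failed" line per sub-check, then an overall verdict, with HTTP 500 returned while any check fails (here only backend-http is still failing; has-synced has recovered compared to the 21:19:22 attempt). A rough Go sketch of a handler producing that shape; check names and wiring are illustrative assumptions, not the router's actual implementation:

// healthz.go: sketch of an aggregated health endpoint in the style of
// the probe output above. All names and checks are illustrative.
package main

import (
	"fmt"
	"net/http"
)

type check struct {
	name string
	run  func() error
}

// healthz runs every sub-check, reports each as [+]/[-], and returns
// 500 with "healthz check failed" if any sub-check fails.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, failed := "", false
		for _, c := range checks {
			if err := c.run(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError) // probe logs "statuscode: 500"
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	checks := []check{
		{"backend-http", func() error { return fmt.Errorf("not ready") }},
		{"has-synced", func() error { return nil }},
		{"process-running", func() error { return nil }},
	}
	http.Handle("/healthz", healthz(checks))
	http.ListenAndServe(":8080", nil)
}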
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.980377 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:19:23 crc kubenswrapper[4721]: I0130 21:19:23.980831 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-utilities\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.000463 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tctzl\" (UniqueName: \"kubernetes.io/projected/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-kube-api-access-tctzl\") pod \"redhat-operators-b48x7\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.073593 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.157443 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9khds"] Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.160130 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.165574 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9khds"] Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.185062 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-855fx"] Jan 30 21:19:24 crc kubenswrapper[4721]: W0130 21:19:24.194501 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ea230f_98d6_4955_bf6a_71a91d65ff20.slice/crio-c02eb06e3ee5961533503c5c8457ae1451088b5e2619d0adf44118db841b4234 WatchSource:0}: Error finding container c02eb06e3ee5961533503c5c8457ae1451088b5e2619d0adf44118db841b4234: Status 404 returned error can't find the container with id c02eb06e3ee5961533503c5c8457ae1451088b5e2619d0adf44118db841b4234 Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.213453 4721 generic.go:334] "Generic (PLEG): container finished" podID="57505fbf-a7f7-45ff-91bb-f3463567721e" containerID="87f6096ead028af7ae8bd76949059a133211f3079c85533a9ff2fc619b0437df" exitCode=0 Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.213535 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" event={"ID":"57505fbf-a7f7-45ff-91bb-f3463567721e","Type":"ContainerDied","Data":"87f6096ead028af7ae8bd76949059a133211f3079c85533a9ff2fc619b0437df"} Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.223743 4721 generic.go:334] "Generic (PLEG): container finished" podID="3179d84c-16bd-405b-ac42-38a710f7a713" containerID="5a50adaaf24492169432488a9ec267b73c5de21f5d95e005c336cfba5a132ccc" exitCode=0 Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.223847 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bq7" event={"ID":"3179d84c-16bd-405b-ac42-38a710f7a713","Type":"ContainerDied","Data":"5a50adaaf24492169432488a9ec267b73c5de21f5d95e005c336cfba5a132ccc"} Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.224550 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bq7" event={"ID":"3179d84c-16bd-405b-ac42-38a710f7a713","Type":"ContainerStarted","Data":"2c1caf58d2eef99f9fc6e0e188aac3053f444f732c3d570bef2eb8086b71cd9f"} Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.270768 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.271169 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d326199d-f87c-4586-911f-f9dcbb7db01a","Type":"ContainerDied","Data":"fd02cc0be69567cdb091c2624ded8e969b990107537301300c8affe337500bdb"} Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.271196 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd02cc0be69567cdb091c2624ded8e969b990107537301300c8affe337500bdb" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.285600 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmdg4\" (UniqueName: \"kubernetes.io/projected/cf260939-466c-4142-b8bf-63d2d9a526f2-kube-api-access-fmdg4\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.286125 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-catalog-content\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.286151 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-utilities\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.388640 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-catalog-content\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.388833 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-utilities\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.388901 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmdg4\" (UniqueName: \"kubernetes.io/projected/cf260939-466c-4142-b8bf-63d2d9a526f2-kube-api-access-fmdg4\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.389741 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-utilities\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.392874 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
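The "Generic (PLEG): container finished" and "SyncLoop (PLEG): event for pod" entries above come from the kubelet's pod lifecycle event generator, which periodically relists container state from the runtime and turns state diffs into ContainerStarted/ContainerDied events for the sync loop. A simplified Go sketch of that relist-and-diff idea; the types are hypothetical, not the real PLEG implementation:

// pleg.go: toy relist loop that diffs the previous container snapshot
// against the current one and emits lifecycle events, in the spirit of
// the PLEG entries above. All types here are illustrative.
package main

import "fmt"

type state string

const (
	running state = "running"
	exited  state = "exited"
)

// relist compares old and cur snapshots (containerID -> state) and
// prints one event per observed transition.
func relist(old, cur map[string]state) {
	for id, s := range cur {
		switch {
		case old[id] != running && s == running:
			fmt.Printf("event: ContainerStarted %s\n", id)
		case old[id] == running && s == exited:
			fmt.Printf("event: ContainerDied %s\n", id)
		}
	}
}

func main() {
	old := map[string]state{"5a50adaa": running}
	cur := map[string]state{"5a50adaa": exited, "2c1caf58": running}
	relist(old, cur) // one ContainerDied and one ContainerStarted, as in the log
}

Because events are derived from snapshots rather than delivered by the runtime, a container that exits quickly (like the init containers here, exitCode=0) can surface its ContainerStarted and ContainerDied in the same relist.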
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-catalog-content\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.424841 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmdg4\" (UniqueName: \"kubernetes.io/projected/cf260939-466c-4142-b8bf-63d2d9a526f2-kube-api-access-fmdg4\") pod \"redhat-operators-9khds\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.447533 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b48x7"] Jan 30 21:19:24 crc kubenswrapper[4721]: W0130 21:19:24.462430 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52ca26dd_2938_4cf4_b812_4a4bcb014d5b.slice/crio-270d74ef20fba620f3cb07edbe23f3a5d0aa0947437aa68352f2b1b1a001a1f4 WatchSource:0}: Error finding container 270d74ef20fba620f3cb07edbe23f3a5d0aa0947437aa68352f2b1b1a001a1f4: Status 404 returned error can't find the container with id 270d74ef20fba620f3cb07edbe23f3a5d0aa0947437aa68352f2b1b1a001a1f4 Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.487463 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.568238 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.761168 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9khds"] Jan 30 21:19:24 crc kubenswrapper[4721]: W0130 21:19:24.765621 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf260939_466c_4142_b8bf_63d2d9a526f2.slice/crio-49c9cf8ed131d3ad990733b900ac0c8977f803bf72ae9d098b75caa1aa42809f WatchSource:0}: Error finding container 49c9cf8ed131d3ad990733b900ac0c8977f803bf72ae9d098b75caa1aa42809f: Status 404 returned error can't find the container with id 49c9cf8ed131d3ad990733b900ac0c8977f803bf72ae9d098b75caa1aa42809f Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.812859 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.824606 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-p6lgp" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.916993 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.917997 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.922195 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.927412 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 21:19:24 crc kubenswrapper[4721]: I0130 21:19:24.931194 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.099989 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.100061 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.202010 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.202461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.202204 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.223665 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.250401 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.310115 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-855fx" event={"ID":"60ea230f-98d6-4955-bf6a-71a91d65ff20","Type":"ContainerStarted","Data":"c02eb06e3ee5961533503c5c8457ae1451088b5e2619d0adf44118db841b4234"} Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.317441 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerStarted","Data":"49c9cf8ed131d3ad990733b900ac0c8977f803bf72ae9d098b75caa1aa42809f"} Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.324526 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerStarted","Data":"270d74ef20fba620f3cb07edbe23f3a5d0aa0947437aa68352f2b1b1a001a1f4"} Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.665311 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.670232 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:25 crc kubenswrapper[4721]: W0130 21:19:25.715245 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod56b94dee_4bbc_43dd_b8e6_5fbbbd0020fc.slice/crio-fb0409e688f8740b99f6af54f3dbbc453c1f88ec29e0a5a784b14c7f2c2dbfe5 WatchSource:0}: Error finding container fb0409e688f8740b99f6af54f3dbbc453c1f88ec29e0a5a784b14c7f2c2dbfe5: Status 404 returned error can't find the container with id fb0409e688f8740b99f6af54f3dbbc453c1f88ec29e0a5a784b14c7f2c2dbfe5 Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.808817 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-gbgkl" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.834621 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc5qk\" (UniqueName: \"kubernetes.io/projected/57505fbf-a7f7-45ff-91bb-f3463567721e-kube-api-access-bc5qk\") pod \"57505fbf-a7f7-45ff-91bb-f3463567721e\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.834701 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57505fbf-a7f7-45ff-91bb-f3463567721e-secret-volume\") pod \"57505fbf-a7f7-45ff-91bb-f3463567721e\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.834726 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57505fbf-a7f7-45ff-91bb-f3463567721e-config-volume\") pod \"57505fbf-a7f7-45ff-91bb-f3463567721e\" (UID: \"57505fbf-a7f7-45ff-91bb-f3463567721e\") " Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.842903 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57505fbf-a7f7-45ff-91bb-f3463567721e-config-volume" (OuterVolumeSpecName: "config-volume") pod "57505fbf-a7f7-45ff-91bb-f3463567721e" (UID: "57505fbf-a7f7-45ff-91bb-f3463567721e"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.848607 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57505fbf-a7f7-45ff-91bb-f3463567721e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "57505fbf-a7f7-45ff-91bb-f3463567721e" (UID: "57505fbf-a7f7-45ff-91bb-f3463567721e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.848735 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57505fbf-a7f7-45ff-91bb-f3463567721e-kube-api-access-bc5qk" (OuterVolumeSpecName: "kube-api-access-bc5qk") pod "57505fbf-a7f7-45ff-91bb-f3463567721e" (UID: "57505fbf-a7f7-45ff-91bb-f3463567721e"). InnerVolumeSpecName "kube-api-access-bc5qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.935953 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc5qk\" (UniqueName: \"kubernetes.io/projected/57505fbf-a7f7-45ff-91bb-f3463567721e-kube-api-access-bc5qk\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.935986 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57505fbf-a7f7-45ff-91bb-f3463567721e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:25 crc kubenswrapper[4721]: I0130 21:19:25.935996 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57505fbf-a7f7-45ff-91bb-f3463567721e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.355571 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" event={"ID":"57505fbf-a7f7-45ff-91bb-f3463567721e","Type":"ContainerDied","Data":"f10b5e375a68ba2ea6df87361038b97af9adf65fcdaf26b069c4126c2c5d2243"} Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.356187 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f10b5e375a68ba2ea6df87361038b97af9adf65fcdaf26b069c4126c2c5d2243" Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.355702 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk" Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.382921 4721 generic.go:334] "Generic (PLEG): container finished" podID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerID="fe4bda470704685f72b9286fb9225436d3a2f2b148aefde4a55e815586df118d" exitCode=0 Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.383088 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-855fx" event={"ID":"60ea230f-98d6-4955-bf6a-71a91d65ff20","Type":"ContainerDied","Data":"fe4bda470704685f72b9286fb9225436d3a2f2b148aefde4a55e815586df118d"} Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.386842 4721 generic.go:334] "Generic (PLEG): container finished" podID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerID="e7c8894979a5e43275cef01265629816d301c33f061014a518600809ddc107a9" exitCode=0 Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.386955 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerDied","Data":"e7c8894979a5e43275cef01265629816d301c33f061014a518600809ddc107a9"} Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.389326 4721 generic.go:334] "Generic (PLEG): container finished" podID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerID="f6d86458990fe2078e82327fdc54227fe9da2308dc9ebbe5b973ed40396a903e" exitCode=0 Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.389378 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerDied","Data":"f6d86458990fe2078e82327fdc54227fe9da2308dc9ebbe5b973ed40396a903e"} Jan 30 21:19:26 crc kubenswrapper[4721]: I0130 21:19:26.392461 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc","Type":"ContainerStarted","Data":"fb0409e688f8740b99f6af54f3dbbc453c1f88ec29e0a5a784b14c7f2c2dbfe5"} Jan 30 21:19:27 crc kubenswrapper[4721]: I0130 21:19:27.402514 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc","Type":"ContainerStarted","Data":"440f97b4fa039c89ef74da199223f188b82876a3d8c577e7d9749e2301272580"} Jan 30 21:19:28 crc kubenswrapper[4721]: I0130 21:19:28.416449 4721 generic.go:334] "Generic (PLEG): container finished" podID="56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc" containerID="440f97b4fa039c89ef74da199223f188b82876a3d8c577e7d9749e2301272580" exitCode=0 Jan 30 21:19:28 crc kubenswrapper[4721]: I0130 21:19:28.416791 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc","Type":"ContainerDied","Data":"440f97b4fa039c89ef74da199223f188b82876a3d8c577e7d9749e2301272580"} Jan 30 21:19:29 crc kubenswrapper[4721]: I0130 21:19:29.451638 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:19:29 crc kubenswrapper[4721]: I0130 21:19:29.452086 4721 prober.go:107] "Probe failed" probeType="Liveness" 
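The liveness failure above ("connect: connection refused") and the "Client.Timeout exceeded while awaiting headers" readiness failures later in this log are the two common HTTP probe failure modes: nothing listening on the port versus a listener that accepts but never answers within the probe deadline. A minimal Go probe sketch with a hard client timeout; illustrative only, not the kubelet's prober, and the URL is an example value taken from the log:

// probe.go: minimal HTTP probe distinguishing the two failure outputs
// seen in this log: "connection refused" (no listener) vs
// "Client.Timeout exceeded" (listener accepts but never responds).
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second} // probe deadline
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp ...: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}

A refused connection fails immediately, while a hung listener burns the full timeout first, which is why the timeout-flavored failures appear at roughly probe-period intervals.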
pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:19:31 crc kubenswrapper[4721]: I0130 21:19:31.585805 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:31 crc kubenswrapper[4721]: I0130 21:19:31.591208 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:19:31 crc kubenswrapper[4721]: I0130 21:19:31.914040 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 30 21:19:31 crc kubenswrapper[4721]: I0130 21:19:31.914203 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-v7lbj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 30 21:19:31 crc kubenswrapper[4721]: I0130 21:19:31.914234 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 30 21:19:31 crc kubenswrapper[4721]: I0130 21:19:31.914249 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-v7lbj" podUID="8306877f-f9f0-4b02-943f-ab42d3f5f66a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.462577 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.467013 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc","Type":"ContainerDied","Data":"fb0409e688f8740b99f6af54f3dbbc453c1f88ec29e0a5a784b14c7f2c2dbfe5"} Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.467051 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb0409e688f8740b99f6af54f3dbbc453c1f88ec29e0a5a784b14c7f2c2dbfe5" Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.535989 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kube-api-access\") pod \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.536094 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kubelet-dir\") pod \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\" (UID: \"56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc\") " Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.536202 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc" (UID: "56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.536262 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.541906 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc" (UID: "56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:19:33 crc kubenswrapper[4721]: I0130 21:19:33.637377 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:34 crc kubenswrapper[4721]: I0130 21:19:34.474272 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 21:19:36 crc kubenswrapper[4721]: I0130 21:19:36.067409 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:19:36 crc kubenswrapper[4721]: I0130 21:19:36.085110 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/19fca1ba-eb6d-479c-90ff-e55739aed640-metrics-certs\") pod \"network-metrics-daemon-bkv95\" (UID: \"19fca1ba-eb6d-479c-90ff-e55739aed640\") " pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:19:36 crc kubenswrapper[4721]: I0130 21:19:36.267026 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bkv95" Jan 30 21:19:37 crc kubenswrapper[4721]: I0130 21:19:37.453089 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j"] Jan 30 21:19:37 crc kubenswrapper[4721]: I0130 21:19:37.453379 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" containerID="cri-o://3ca21a265ffe19857eb6a00b060c373fc393d1568a9554a1f89291d339ed8616" gracePeriod=30 Jan 30 21:19:37 crc kubenswrapper[4721]: I0130 21:19:37.456233 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2b8tc"] Jan 30 21:19:37 crc kubenswrapper[4721]: I0130 21:19:37.456476 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerName="controller-manager" containerID="cri-o://45be91227b8316c673b8c0028c6343161b27d9bbfcfc328217c217fd28684894" gracePeriod=30 Jan 30 21:19:40 crc kubenswrapper[4721]: I0130 21:19:40.511352 4721 generic.go:334] "Generic (PLEG): container finished" podID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerID="45be91227b8316c673b8c0028c6343161b27d9bbfcfc328217c217fd28684894" exitCode=0 Jan 30 21:19:40 crc kubenswrapper[4721]: I0130 21:19:40.511458 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" event={"ID":"9d5b2021-f707-45bf-9890-d0ec722cd52f","Type":"ContainerDied","Data":"45be91227b8316c673b8c0028c6343161b27d9bbfcfc328217c217fd28684894"} Jan 30 21:19:40 crc kubenswrapper[4721]: I0130 21:19:40.513427 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerID="3ca21a265ffe19857eb6a00b060c373fc393d1568a9554a1f89291d339ed8616" exitCode=0 Jan 30 21:19:40 crc kubenswrapper[4721]: I0130 21:19:40.513456 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" event={"ID":"fe55c0cf-cc81-4cd9-94d8-5637539acba4","Type":"ContainerDied","Data":"3ca21a265ffe19857eb6a00b060c373fc393d1568a9554a1f89291d339ed8616"} Jan 30 21:19:40 crc kubenswrapper[4721]: I0130 21:19:40.543515 4721 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:19:41 crc kubenswrapper[4721]: I0130 21:19:41.940448 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-v7lbj" Jan 30 21:19:42 crc kubenswrapper[4721]: I0130 21:19:42.116372 4721 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2b8tc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 30 21:19:42 crc kubenswrapper[4721]: I0130 21:19:42.116429 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 30 21:19:42 crc kubenswrapper[4721]: I0130 21:19:42.158512 4721 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-pt27j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Jan 30 21:19:42 crc kubenswrapper[4721]: I0130 21:19:42.158618 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Jan 30 21:19:50 crc kubenswrapper[4721]: E0130 21:19:50.233632 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 30 21:19:50 crc kubenswrapper[4721]: E0130 21:19:50.234529 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5br7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-42958_openshift-marketplace(cc6f52ce-e313-4e62-8cd0-292c19d3cbc5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:19:50 crc kubenswrapper[4721]: E0130 21:19:50.235773 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-42958" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" Jan 30 21:19:53 crc kubenswrapper[4721]: I0130 21:19:53.115801 4721 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2b8tc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 21:19:53 crc kubenswrapper[4721]: I0130 21:19:53.116241 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 30 21:19:53 crc kubenswrapper[4721]: I0130 21:19:53.158624 4721 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-pt27j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 21:19:53 crc kubenswrapper[4721]: I0130 21:19:53.158994 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" 
probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 30 21:19:53 crc kubenswrapper[4721]: I0130 21:19:53.968127 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxmsm" Jan 30 21:19:56 crc kubenswrapper[4721]: E0130 21:19:56.632277 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 30 21:19:56 crc kubenswrapper[4721]: E0130 21:19:56.632799 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dwmh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gsd7h_openshift-marketplace(d263bfe7-d31b-445d-933f-2e1bc58a8e26): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:19:56 crc kubenswrapper[4721]: E0130 21:19:56.633954 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gsd7h" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" Jan 30 21:19:58 crc kubenswrapper[4721]: E0130 21:19:58.314564 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gsd7h" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" Jan 30 21:19:58 crc kubenswrapper[4721]: E0130 21:19:58.314785 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-42958" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.385020 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.391119 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.436980 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd"] Jan 30 21:19:58 crc kubenswrapper[4721]: E0130 21:19:58.437533 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc" containerName="pruner" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437562 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc" containerName="pruner" Jan 30 21:19:58 crc kubenswrapper[4721]: E0130 21:19:58.437581 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerName="controller-manager" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437596 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerName="controller-manager" Jan 30 21:19:58 crc kubenswrapper[4721]: E0130 21:19:58.437616 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437631 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" Jan 30 21:19:58 crc kubenswrapper[4721]: E0130 21:19:58.437670 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57505fbf-a7f7-45ff-91bb-f3463567721e" containerName="collect-profiles" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437683 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="57505fbf-a7f7-45ff-91bb-f3463567721e" containerName="collect-profiles" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437890 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="56b94dee-4bbc-43dd-b8e6-5fbbbd0020fc" containerName="pruner" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437921 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" containerName="route-controller-manager" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437946 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" containerName="controller-manager" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.437968 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="57505fbf-a7f7-45ff-91bb-f3463567721e" containerName="collect-profiles" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.438924 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.456895 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd"] Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480432 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9p95\" (UniqueName: \"kubernetes.io/projected/9d5b2021-f707-45bf-9890-d0ec722cd52f-kube-api-access-v9p95\") pod \"9d5b2021-f707-45bf-9890-d0ec722cd52f\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480597 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d5b2021-f707-45bf-9890-d0ec722cd52f-serving-cert\") pod \"9d5b2021-f707-45bf-9890-d0ec722cd52f\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480664 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-config\") pod \"9d5b2021-f707-45bf-9890-d0ec722cd52f\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480758 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-config\") pod \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480820 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-client-ca\") pod \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480909 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsmdl\" (UniqueName: \"kubernetes.io/projected/fe55c0cf-cc81-4cd9-94d8-5637539acba4-kube-api-access-dsmdl\") pod \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480935 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-client-ca\") pod \"9d5b2021-f707-45bf-9890-d0ec722cd52f\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480961 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe55c0cf-cc81-4cd9-94d8-5637539acba4-serving-cert\") pod \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\" (UID: \"fe55c0cf-cc81-4cd9-94d8-5637539acba4\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.480985 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-proxy-ca-bundles\") pod \"9d5b2021-f707-45bf-9890-d0ec722cd52f\" (UID: \"9d5b2021-f707-45bf-9890-d0ec722cd52f\") " Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.483903 4721 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-config" (OuterVolumeSpecName: "config") pod "9d5b2021-f707-45bf-9890-d0ec722cd52f" (UID: "9d5b2021-f707-45bf-9890-d0ec722cd52f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.484747 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-client-ca" (OuterVolumeSpecName: "client-ca") pod "9d5b2021-f707-45bf-9890-d0ec722cd52f" (UID: "9d5b2021-f707-45bf-9890-d0ec722cd52f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.485644 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-config" (OuterVolumeSpecName: "config") pod "fe55c0cf-cc81-4cd9-94d8-5637539acba4" (UID: "fe55c0cf-cc81-4cd9-94d8-5637539acba4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.485747 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-client-ca" (OuterVolumeSpecName: "client-ca") pod "fe55c0cf-cc81-4cd9-94d8-5637539acba4" (UID: "fe55c0cf-cc81-4cd9-94d8-5637539acba4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.485954 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "9d5b2021-f707-45bf-9890-d0ec722cd52f" (UID: "9d5b2021-f707-45bf-9890-d0ec722cd52f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.488684 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d5b2021-f707-45bf-9890-d0ec722cd52f-kube-api-access-v9p95" (OuterVolumeSpecName: "kube-api-access-v9p95") pod "9d5b2021-f707-45bf-9890-d0ec722cd52f" (UID: "9d5b2021-f707-45bf-9890-d0ec722cd52f"). InnerVolumeSpecName "kube-api-access-v9p95". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.488721 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe55c0cf-cc81-4cd9-94d8-5637539acba4-kube-api-access-dsmdl" (OuterVolumeSpecName: "kube-api-access-dsmdl") pod "fe55c0cf-cc81-4cd9-94d8-5637539acba4" (UID: "fe55c0cf-cc81-4cd9-94d8-5637539acba4"). InnerVolumeSpecName "kube-api-access-dsmdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.489589 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe55c0cf-cc81-4cd9-94d8-5637539acba4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fe55c0cf-cc81-4cd9-94d8-5637539acba4" (UID: "fe55c0cf-cc81-4cd9-94d8-5637539acba4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.498211 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d5b2021-f707-45bf-9890-d0ec722cd52f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d5b2021-f707-45bf-9890-d0ec722cd52f" (UID: "9d5b2021-f707-45bf-9890-d0ec722cd52f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582339 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fc18041-4adb-4ea5-ba2e-98c383aa4747-serving-cert\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582403 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-config\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582489 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7qgx\" (UniqueName: \"kubernetes.io/projected/7fc18041-4adb-4ea5-ba2e-98c383aa4747-kube-api-access-v7qgx\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582654 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-client-ca\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582697 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsmdl\" (UniqueName: \"kubernetes.io/projected/fe55c0cf-cc81-4cd9-94d8-5637539acba4-kube-api-access-dsmdl\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582710 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582721 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe55c0cf-cc81-4cd9-94d8-5637539acba4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582730 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582738 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9p95\" (UniqueName: 
\"kubernetes.io/projected/9d5b2021-f707-45bf-9890-d0ec722cd52f-kube-api-access-v9p95\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582747 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d5b2021-f707-45bf-9890-d0ec722cd52f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582758 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d5b2021-f707-45bf-9890-d0ec722cd52f-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582769 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.582846 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe55c0cf-cc81-4cd9-94d8-5637539acba4-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.629721 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" event={"ID":"9d5b2021-f707-45bf-9890-d0ec722cd52f","Type":"ContainerDied","Data":"b45760c8ed6be0a742ab04b0c7e58873dc6fdf2f6fd8f951e1e554a4a463b6a9"} Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.629820 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2b8tc" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.629832 4721 scope.go:117] "RemoveContainer" containerID="45be91227b8316c673b8c0028c6343161b27d9bbfcfc328217c217fd28684894" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.631812 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" event={"ID":"fe55c0cf-cc81-4cd9-94d8-5637539acba4","Type":"ContainerDied","Data":"593940af64e56c2e48b19bb96d748346f047e4cf5973771d2d42ef6d4cd82f60"} Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.631905 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.682152 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j"] Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.683929 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-client-ca\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.683970 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fc18041-4adb-4ea5-ba2e-98c383aa4747-serving-cert\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.684001 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-config\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.684055 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7qgx\" (UniqueName: \"kubernetes.io/projected/7fc18041-4adb-4ea5-ba2e-98c383aa4747-kube-api-access-v7qgx\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.687596 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pt27j"] Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.688962 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-config\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.693607 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-client-ca\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.698571 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2b8tc"] Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.698626 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2b8tc"] Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.701065 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fc18041-4adb-4ea5-ba2e-98c383aa4747-serving-cert\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.709660 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7qgx\" (UniqueName: \"kubernetes.io/projected/7fc18041-4adb-4ea5-ba2e-98c383aa4747-kube-api-access-v7qgx\") pod \"route-controller-manager-58b9b5c9db-2ttkd\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:58 crc kubenswrapper[4721]: I0130 21:19:58.760350 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:19:59 crc kubenswrapper[4721]: I0130 21:19:59.448540 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:19:59 crc kubenswrapper[4721]: I0130 21:19:59.448624 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:19:59 crc kubenswrapper[4721]: E0130 21:19:59.652483 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1868246698/2\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 30 21:19:59 crc kubenswrapper[4721]: E0130 21:19:59.652632 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jw2fp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-855fx_openshift-marketplace(60ea230f-98d6-4955-bf6a-71a91d65ff20): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1868246698/2\": happened during read: context canceled" logger="UnhandledError" Jan 30 21:19:59 crc kubenswrapper[4721]: E0130 21:19:59.653859 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage1868246698/2\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-marketplace-855fx" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" Jan 30 21:20:00 crc kubenswrapper[4721]: I0130 21:20:00.100179 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d5b2021-f707-45bf-9890-d0ec722cd52f" path="/var/lib/kubelet/pods/9d5b2021-f707-45bf-9890-d0ec722cd52f/volumes" Jan 30 21:20:00 crc kubenswrapper[4721]: I0130 21:20:00.101004 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe55c0cf-cc81-4cd9-94d8-5637539acba4" path="/var/lib/kubelet/pods/fe55c0cf-cc81-4cd9-94d8-5637539acba4/volumes" Jan 30 21:20:00 crc kubenswrapper[4721]: I0130 21:20:00.382790 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 21:20:00 crc kubenswrapper[4721]: E0130 21:20:00.516486 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 30 21:20:00 crc kubenswrapper[4721]: E0130 21:20:00.516783 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gj5cj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wrlqs_openshift-marketplace(ecc44241-f566-4099-b6cf-adf0420a14f1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:20:00 crc kubenswrapper[4721]: E0130 21:20:00.517842 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wrlqs" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" Jan 30 21:20:00 crc kubenswrapper[4721]: E0130 21:20:00.524850 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 30 21:20:00 crc kubenswrapper[4721]: E0130 21:20:00.525026 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qhlnt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-9mhm6_openshift-marketplace(315507aa-1e32-4360-b5b0-aa3625a10b0b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:20:00 crc kubenswrapper[4721]: E0130 21:20:00.526819 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-9mhm6" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.002693 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5869995894-mqpmh"] Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.005382 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.007786 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.008038 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.009201 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.009799 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.009987 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.010312 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.014037 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5869995894-mqpmh"] Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.015157 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.126406 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-proxy-ca-bundles\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.126464 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-config\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.126489 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-client-ca\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.126511 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7549d96-9e33-46b7-923d-55646fbdcfba-serving-cert\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.126534 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4v2m\" (UniqueName: 
\"kubernetes.io/projected/b7549d96-9e33-46b7-923d-55646fbdcfba-kube-api-access-c4v2m\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.228219 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-proxy-ca-bundles\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.228274 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-config\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.228319 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-client-ca\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.228344 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7549d96-9e33-46b7-923d-55646fbdcfba-serving-cert\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.228365 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4v2m\" (UniqueName: \"kubernetes.io/projected/b7549d96-9e33-46b7-923d-55646fbdcfba-kube-api-access-c4v2m\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.229981 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-client-ca\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.231361 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-config\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.234909 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-proxy-ca-bundles\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " 
pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.235899 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7549d96-9e33-46b7-923d-55646fbdcfba-serving-cert\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.243158 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4v2m\" (UniqueName: \"kubernetes.io/projected/b7549d96-9e33-46b7-923d-55646fbdcfba-kube-api-access-c4v2m\") pod \"controller-manager-5869995894-mqpmh\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.297928 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.298614 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.300453 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.301048 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.318045 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.331407 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.430694 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/624c16c0-76a3-4a3d-982d-2132eeaba773-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.430738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/624c16c0-76a3-4a3d-982d-2132eeaba773-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.532335 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/624c16c0-76a3-4a3d-982d-2132eeaba773-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.532736 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/624c16c0-76a3-4a3d-982d-2132eeaba773-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.532856 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/624c16c0-76a3-4a3d-982d-2132eeaba773-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.550794 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/624c16c0-76a3-4a3d-982d-2132eeaba773-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:01 crc kubenswrapper[4721]: I0130 21:20:01.627761 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:04 crc kubenswrapper[4721]: E0130 21:20:04.064341 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wrlqs" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" Jan 30 21:20:04 crc kubenswrapper[4721]: E0130 21:20:04.066397 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-9mhm6" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" Jan 30 21:20:04 crc kubenswrapper[4721]: E0130 21:20:04.101416 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 30 21:20:04 crc kubenswrapper[4721]: E0130 21:20:04.101577 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tctzl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-b48x7_openshift-marketplace(52ca26dd-2938-4cf4-b812-4a4bcb014d5b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:20:04 crc kubenswrapper[4721]: E0130 21:20:04.103203 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-b48x7" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.239667 4721 
scope.go:117] "RemoveContainer" containerID="3ca21a265ffe19857eb6a00b060c373fc393d1568a9554a1f89291d339ed8616" Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.598943 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-bkv95"] Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.653862 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5869995894-mqpmh"] Jan 30 21:20:04 crc kubenswrapper[4721]: W0130 21:20:04.655177 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19fca1ba_eb6d_479c_90ff_e55739aed640.slice/crio-d50bd14ad4eb2747b24fa9c7a5aefca7d9f9c590ba04de83e85c8e79a062783b WatchSource:0}: Error finding container d50bd14ad4eb2747b24fa9c7a5aefca7d9f9c590ba04de83e85c8e79a062783b: Status 404 returned error can't find the container with id d50bd14ad4eb2747b24fa9c7a5aefca7d9f9c590ba04de83e85c8e79a062783b Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.679359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bkv95" event={"ID":"19fca1ba-eb6d-479c-90ff-e55739aed640","Type":"ContainerStarted","Data":"d50bd14ad4eb2747b24fa9c7a5aefca7d9f9c590ba04de83e85c8e79a062783b"} Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.688662 4721 generic.go:334] "Generic (PLEG): container finished" podID="3179d84c-16bd-405b-ac42-38a710f7a713" containerID="ce06b7dd514e72160aeb454a4b9f7708f6e8d995e3646acd3d0ddd59f602cd37" exitCode=0 Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.689149 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bq7" event={"ID":"3179d84c-16bd-405b-ac42-38a710f7a713","Type":"ContainerDied","Data":"ce06b7dd514e72160aeb454a4b9f7708f6e8d995e3646acd3d0ddd59f602cd37"} Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.699746 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerStarted","Data":"6344fa6cdddb30ccde75213175f779345242ebec8def900fdfb90fbf6cf3be28"} Jan 30 21:20:04 crc kubenswrapper[4721]: E0130 21:20:04.713245 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-b48x7" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.763616 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 21:20:04 crc kubenswrapper[4721]: I0130 21:20:04.781646 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd"] Jan 30 21:20:04 crc kubenswrapper[4721]: W0130 21:20:04.803894 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fc18041_4adb_4ea5_ba2e_98c383aa4747.slice/crio-5e8790d787d40fcd43b5acd79b907db4be411f0fd93124275e45afa55cc1e2dd WatchSource:0}: Error finding container 5e8790d787d40fcd43b5acd79b907db4be411f0fd93124275e45afa55cc1e2dd: Status 404 returned error can't find the container with id 5e8790d787d40fcd43b5acd79b907db4be411f0fd93124275e45afa55cc1e2dd Jan 30 21:20:05 crc 
kubenswrapper[4721]: I0130 21:20:05.721750 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" event={"ID":"b7549d96-9e33-46b7-923d-55646fbdcfba","Type":"ContainerStarted","Data":"2eb0b88466271b4a9cd160894e4164cbcefde9f2bee903697ef219d1e6a0ef25"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.722499 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.722513 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" event={"ID":"b7549d96-9e33-46b7-923d-55646fbdcfba","Type":"ContainerStarted","Data":"11b03a8f4d8139c17ded29c760b02499d072812a1c75bdd35dde6a3ede9dad67"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.724371 4721 generic.go:334] "Generic (PLEG): container finished" podID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerID="6344fa6cdddb30ccde75213175f779345242ebec8def900fdfb90fbf6cf3be28" exitCode=0 Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.724507 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerDied","Data":"6344fa6cdddb30ccde75213175f779345242ebec8def900fdfb90fbf6cf3be28"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.727911 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.729662 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" event={"ID":"7fc18041-4adb-4ea5-ba2e-98c383aa4747","Type":"ContainerStarted","Data":"26e757815bbfe0a0f7a93391c873d274293f2d031cd2f90ab2655379b4d5850c"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.729708 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" event={"ID":"7fc18041-4adb-4ea5-ba2e-98c383aa4747","Type":"ContainerStarted","Data":"5e8790d787d40fcd43b5acd79b907db4be411f0fd93124275e45afa55cc1e2dd"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.729864 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.734663 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"624c16c0-76a3-4a3d-982d-2132eeaba773","Type":"ContainerStarted","Data":"05b99ea7c6406417278f849b1c9321d2d64ca7698e3b147863faca1ee387d64a"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.734715 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"624c16c0-76a3-4a3d-982d-2132eeaba773","Type":"ContainerStarted","Data":"30d71833d1e9ca928a6322ede8f5ef9b1d20a92ff0635618991069fa0391d153"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.735441 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.737055 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/network-metrics-daemon-bkv95" event={"ID":"19fca1ba-eb6d-479c-90ff-e55739aed640","Type":"ContainerStarted","Data":"742289394c875b90fa588c3afeebcac730c1df569ed7d8f3ba69098b466f4c7b"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.737096 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bkv95" event={"ID":"19fca1ba-eb6d-479c-90ff-e55739aed640","Type":"ContainerStarted","Data":"8845c67ec02856abb469db0371414774ea82aba6db22e2eb394852817d999c96"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.741255 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bq7" event={"ID":"3179d84c-16bd-405b-ac42-38a710f7a713","Type":"ContainerStarted","Data":"e0baf73e55a57753bcb6a13be9e1a3321fc2e9b85cd7a003b00b00dbf7de67cf"} Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.745164 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" podStartSLOduration=9.745140252 podStartE2EDuration="9.745140252s" podCreationTimestamp="2026-01-30 21:19:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:05.742182675 +0000 UTC m=+194.534083931" watchObservedRunningTime="2026-01-30 21:20:05.745140252 +0000 UTC m=+194.537041488" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.761612 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w4bq7" podStartSLOduration=2.818027801 podStartE2EDuration="43.761593511s" podCreationTimestamp="2026-01-30 21:19:22 +0000 UTC" firstStartedPulling="2026-01-30 21:19:24.252214962 +0000 UTC m=+153.044116208" lastFinishedPulling="2026-01-30 21:20:05.195780672 +0000 UTC m=+193.987681918" observedRunningTime="2026-01-30 21:20:05.75728041 +0000 UTC m=+194.549181666" watchObservedRunningTime="2026-01-30 21:20:05.761593511 +0000 UTC m=+194.553494757" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.774170 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=4.774151883 podStartE2EDuration="4.774151883s" podCreationTimestamp="2026-01-30 21:20:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:05.770927407 +0000 UTC m=+194.562828653" watchObservedRunningTime="2026-01-30 21:20:05.774151883 +0000 UTC m=+194.566053129" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.819820 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" podStartSLOduration=9.819796609 podStartE2EDuration="9.819796609s" podCreationTimestamp="2026-01-30 21:19:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:05.808994664 +0000 UTC m=+194.600895920" watchObservedRunningTime="2026-01-30 21:20:05.819796609 +0000 UTC m=+194.611697855" Jan 30 21:20:05 crc kubenswrapper[4721]: I0130 21:20:05.867718 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-bkv95" podStartSLOduration=171.867695588 podStartE2EDuration="2m51.867695588s" podCreationTimestamp="2026-01-30 
21:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:05.865903579 +0000 UTC m=+194.657804825" watchObservedRunningTime="2026-01-30 21:20:05.867695588 +0000 UTC m=+194.659596834" Jan 30 21:20:06 crc kubenswrapper[4721]: I0130 21:20:06.748673 4721 generic.go:334] "Generic (PLEG): container finished" podID="624c16c0-76a3-4a3d-982d-2132eeaba773" containerID="05b99ea7c6406417278f849b1c9321d2d64ca7698e3b147863faca1ee387d64a" exitCode=0 Jan 30 21:20:06 crc kubenswrapper[4721]: I0130 21:20:06.748788 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"624c16c0-76a3-4a3d-982d-2132eeaba773","Type":"ContainerDied","Data":"05b99ea7c6406417278f849b1c9321d2d64ca7698e3b147863faca1ee387d64a"} Jan 30 21:20:07 crc kubenswrapper[4721]: I0130 21:20:07.756707 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerStarted","Data":"79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63"} Jan 30 21:20:07 crc kubenswrapper[4721]: I0130 21:20:07.774080 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9khds" podStartSLOduration=4.615976057 podStartE2EDuration="43.774063364s" podCreationTimestamp="2026-01-30 21:19:24 +0000 UTC" firstStartedPulling="2026-01-30 21:19:27.403430655 +0000 UTC m=+156.195331901" lastFinishedPulling="2026-01-30 21:20:06.561517962 +0000 UTC m=+195.353419208" observedRunningTime="2026-01-30 21:20:07.772975049 +0000 UTC m=+196.564876295" watchObservedRunningTime="2026-01-30 21:20:07.774063364 +0000 UTC m=+196.565964610" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.005887 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.105085 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 21:20:08 crc kubenswrapper[4721]: E0130 21:20:08.105426 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="624c16c0-76a3-4a3d-982d-2132eeaba773" containerName="pruner" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.105451 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="624c16c0-76a3-4a3d-982d-2132eeaba773" containerName="pruner" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.105621 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="624c16c0-76a3-4a3d-982d-2132eeaba773" containerName="pruner" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.108026 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.117158 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.139122 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/624c16c0-76a3-4a3d-982d-2132eeaba773-kubelet-dir\") pod \"624c16c0-76a3-4a3d-982d-2132eeaba773\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.139250 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/624c16c0-76a3-4a3d-982d-2132eeaba773-kube-api-access\") pod \"624c16c0-76a3-4a3d-982d-2132eeaba773\" (UID: \"624c16c0-76a3-4a3d-982d-2132eeaba773\") " Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.139246 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/624c16c0-76a3-4a3d-982d-2132eeaba773-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "624c16c0-76a3-4a3d-982d-2132eeaba773" (UID: "624c16c0-76a3-4a3d-982d-2132eeaba773"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.139572 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/624c16c0-76a3-4a3d-982d-2132eeaba773-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.147113 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/624c16c0-76a3-4a3d-982d-2132eeaba773-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "624c16c0-76a3-4a3d-982d-2132eeaba773" (UID: "624c16c0-76a3-4a3d-982d-2132eeaba773"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.240887 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-var-lock\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.240936 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ab260955-261a-451a-ac81-8e359c0892ef-kube-api-access\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.240960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.241031 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/624c16c0-76a3-4a3d-982d-2132eeaba773-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.342122 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-var-lock\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.342165 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ab260955-261a-451a-ac81-8e359c0892ef-kube-api-access\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.342192 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.342235 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-var-lock\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.342243 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.364359 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/ab260955-261a-451a-ac81-8e359c0892ef-kube-api-access\") pod \"installer-9-crc\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.429674 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.763105 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"624c16c0-76a3-4a3d-982d-2132eeaba773","Type":"ContainerDied","Data":"30d71833d1e9ca928a6322ede8f5ef9b1d20a92ff0635618991069fa0391d153"} Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.763542 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30d71833d1e9ca928a6322ede8f5ef9b1d20a92ff0635618991069fa0391d153" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.763160 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 21:20:08 crc kubenswrapper[4721]: I0130 21:20:08.827714 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 21:20:08 crc kubenswrapper[4721]: W0130 21:20:08.837877 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podab260955_261a_451a_ac81_8e359c0892ef.slice/crio-05f19d57707d6467f1ad0b90d4013a089c795fa3d1610fa0dd06b0c778dddd62 WatchSource:0}: Error finding container 05f19d57707d6467f1ad0b90d4013a089c795fa3d1610fa0dd06b0c778dddd62: Status 404 returned error can't find the container with id 05f19d57707d6467f1ad0b90d4013a089c795fa3d1610fa0dd06b0c778dddd62 Jan 30 21:20:09 crc kubenswrapper[4721]: I0130 21:20:09.773329 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ab260955-261a-451a-ac81-8e359c0892ef","Type":"ContainerStarted","Data":"170c2c04fd1f1d20d69257aadac4bd9510ec5dd5039133e0373510675e3b8fe8"} Jan 30 21:20:09 crc kubenswrapper[4721]: I0130 21:20:09.773682 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ab260955-261a-451a-ac81-8e359c0892ef","Type":"ContainerStarted","Data":"05f19d57707d6467f1ad0b90d4013a089c795fa3d1610fa0dd06b0c778dddd62"} Jan 30 21:20:09 crc kubenswrapper[4721]: I0130 21:20:09.793413 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.79336876 podStartE2EDuration="1.79336876s" podCreationTimestamp="2026-01-30 21:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:09.785479629 +0000 UTC m=+198.577380885" watchObservedRunningTime="2026-01-30 21:20:09.79336876 +0000 UTC m=+198.585270006" Jan 30 21:20:11 crc kubenswrapper[4721]: I0130 21:20:11.784147 4721 generic.go:334] "Generic (PLEG): container finished" podID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerID="2816fa78bf98e536f7497588f62518ae3265ee4761d51f043bd8e1c119a96029" exitCode=0 Jan 30 21:20:11 crc kubenswrapper[4721]: I0130 21:20:11.784247 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsd7h" 
event={"ID":"d263bfe7-d31b-445d-933f-2e1bc58a8e26","Type":"ContainerDied","Data":"2816fa78bf98e536f7497588f62518ae3265ee4761d51f043bd8e1c119a96029"} Jan 30 21:20:13 crc kubenswrapper[4721]: I0130 21:20:13.056864 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:20:13 crc kubenswrapper[4721]: I0130 21:20:13.056916 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:20:13 crc kubenswrapper[4721]: I0130 21:20:13.674777 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:20:13 crc kubenswrapper[4721]: I0130 21:20:13.839609 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:20:14 crc kubenswrapper[4721]: I0130 21:20:14.487761 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:20:14 crc kubenswrapper[4721]: I0130 21:20:14.488201 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:20:14 crc kubenswrapper[4721]: I0130 21:20:14.530238 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:20:14 crc kubenswrapper[4721]: I0130 21:20:14.804246 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsd7h" event={"ID":"d263bfe7-d31b-445d-933f-2e1bc58a8e26","Type":"ContainerStarted","Data":"651a60a28f13d6c3891afca2f059e8bfbdb2c51794e41c9ade67dbaac35c9187"} Jan 30 21:20:14 crc kubenswrapper[4721]: I0130 21:20:14.824040 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gsd7h" podStartSLOduration=3.150973641 podStartE2EDuration="54.82402555s" podCreationTimestamp="2026-01-30 21:19:20 +0000 UTC" firstStartedPulling="2026-01-30 21:19:22.159021337 +0000 UTC m=+150.950922613" lastFinishedPulling="2026-01-30 21:20:13.832073276 +0000 UTC m=+202.623974522" observedRunningTime="2026-01-30 21:20:14.820767957 +0000 UTC m=+203.612669203" watchObservedRunningTime="2026-01-30 21:20:14.82402555 +0000 UTC m=+203.615926796" Jan 30 21:20:14 crc kubenswrapper[4721]: I0130 21:20:14.845398 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:20:17 crc kubenswrapper[4721]: I0130 21:20:17.225589 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9khds"] Jan 30 21:20:17 crc kubenswrapper[4721]: I0130 21:20:17.226522 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9khds" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="registry-server" containerID="cri-o://79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63" gracePeriod=2 Jan 30 21:20:17 crc kubenswrapper[4721]: I0130 21:20:17.823917 4721 generic.go:334] "Generic (PLEG): container finished" podID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerID="c2b9a226a1e6f465b182fa69bd04f1bdf7b7c0c118b401b881b67e1bfd5ec739" exitCode=0 Jan 30 21:20:17 crc kubenswrapper[4721]: I0130 21:20:17.823977 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-42958" event={"ID":"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5","Type":"ContainerDied","Data":"c2b9a226a1e6f465b182fa69bd04f1bdf7b7c0c118b401b881b67e1bfd5ec739"} Jan 30 21:20:18 crc kubenswrapper[4721]: I0130 21:20:18.833611 4721 generic.go:334] "Generic (PLEG): container finished" podID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerID="79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63" exitCode=0 Jan 30 21:20:18 crc kubenswrapper[4721]: I0130 21:20:18.833991 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerDied","Data":"79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63"} Jan 30 21:20:21 crc kubenswrapper[4721]: I0130 21:20:21.064128 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:20:21 crc kubenswrapper[4721]: I0130 21:20:21.064564 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:20:21 crc kubenswrapper[4721]: I0130 21:20:21.119327 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:20:21 crc kubenswrapper[4721]: I0130 21:20:21.887125 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:20:24 crc kubenswrapper[4721]: E0130 21:20:24.488734 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63 is running failed: container process not found" containerID="79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 21:20:24 crc kubenswrapper[4721]: E0130 21:20:24.489712 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63 is running failed: container process not found" containerID="79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 21:20:24 crc kubenswrapper[4721]: E0130 21:20:24.490114 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63 is running failed: container process not found" containerID="79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 21:20:24 crc kubenswrapper[4721]: E0130 21:20:24.490173 4721 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-9khds" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="registry-server" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.245270 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.429474 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-catalog-content\") pod \"cf260939-466c-4142-b8bf-63d2d9a526f2\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.430445 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmdg4\" (UniqueName: \"kubernetes.io/projected/cf260939-466c-4142-b8bf-63d2d9a526f2-kube-api-access-fmdg4\") pod \"cf260939-466c-4142-b8bf-63d2d9a526f2\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.430723 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-utilities\") pod \"cf260939-466c-4142-b8bf-63d2d9a526f2\" (UID: \"cf260939-466c-4142-b8bf-63d2d9a526f2\") " Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.431742 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-utilities" (OuterVolumeSpecName: "utilities") pod "cf260939-466c-4142-b8bf-63d2d9a526f2" (UID: "cf260939-466c-4142-b8bf-63d2d9a526f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.436452 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf260939-466c-4142-b8bf-63d2d9a526f2-kube-api-access-fmdg4" (OuterVolumeSpecName: "kube-api-access-fmdg4") pod "cf260939-466c-4142-b8bf-63d2d9a526f2" (UID: "cf260939-466c-4142-b8bf-63d2d9a526f2"). InnerVolumeSpecName "kube-api-access-fmdg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.532203 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmdg4\" (UniqueName: \"kubernetes.io/projected/cf260939-466c-4142-b8bf-63d2d9a526f2-kube-api-access-fmdg4\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.532245 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.712965 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf260939-466c-4142-b8bf-63d2d9a526f2" (UID: "cf260939-466c-4142-b8bf-63d2d9a526f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.735983 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf260939-466c-4142-b8bf-63d2d9a526f2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.903226 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9khds" event={"ID":"cf260939-466c-4142-b8bf-63d2d9a526f2","Type":"ContainerDied","Data":"49c9cf8ed131d3ad990733b900ac0c8977f803bf72ae9d098b75caa1aa42809f"} Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.903278 4721 scope.go:117] "RemoveContainer" containerID="79d20fddfc1377edb267ab5569c2ccf945a6f91e4210df2f38e0888219436f63" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.903395 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9khds" Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.934748 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9khds"] Jan 30 21:20:27 crc kubenswrapper[4721]: I0130 21:20:27.942326 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9khds"] Jan 30 21:20:28 crc kubenswrapper[4721]: I0130 21:20:28.100818 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" path="/var/lib/kubelet/pods/cf260939-466c-4142-b8bf-63d2d9a526f2/volumes" Jan 30 21:20:29 crc kubenswrapper[4721]: I0130 21:20:29.448631 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:20:29 crc kubenswrapper[4721]: I0130 21:20:29.448704 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:20:29 crc kubenswrapper[4721]: I0130 21:20:29.448756 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:20:29 crc kubenswrapper[4721]: I0130 21:20:29.449410 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:20:29 crc kubenswrapper[4721]: I0130 21:20:29.449509 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043" gracePeriod=600 Jan 30 21:20:30 crc kubenswrapper[4721]: I0130 21:20:30.922186 4721 generic.go:334] "Generic (PLEG): container finished" 
podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043" exitCode=0 Jan 30 21:20:30 crc kubenswrapper[4721]: I0130 21:20:30.922230 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043"} Jan 30 21:20:31 crc kubenswrapper[4721]: I0130 21:20:31.713026 4721 scope.go:117] "RemoveContainer" containerID="6344fa6cdddb30ccde75213175f779345242ebec8def900fdfb90fbf6cf3be28" Jan 30 21:20:32 crc kubenswrapper[4721]: I0130 21:20:32.023469 4721 scope.go:117] "RemoveContainer" containerID="e7c8894979a5e43275cef01265629816d301c33f061014a518600809ddc107a9" Jan 30 21:20:36 crc kubenswrapper[4721]: I0130 21:20:36.497595 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5869995894-mqpmh"] Jan 30 21:20:36 crc kubenswrapper[4721]: I0130 21:20:36.498944 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" podUID="b7549d96-9e33-46b7-923d-55646fbdcfba" containerName="controller-manager" containerID="cri-o://2eb0b88466271b4a9cd160894e4164cbcefde9f2bee903697ef219d1e6a0ef25" gracePeriod=30 Jan 30 21:20:36 crc kubenswrapper[4721]: I0130 21:20:36.637162 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd"] Jan 30 21:20:36 crc kubenswrapper[4721]: I0130 21:20:36.637438 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" podUID="7fc18041-4adb-4ea5-ba2e-98c383aa4747" containerName="route-controller-manager" containerID="cri-o://26e757815bbfe0a0f7a93391c873d274293f2d031cd2f90ab2655379b4d5850c" gracePeriod=30 Jan 30 21:20:36 crc kubenswrapper[4721]: I0130 21:20:36.962329 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"22a9d3c345d244973800e77dbe9cd0a1ed3e1b5e0c717d90032aeb0efb193236"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.971722 4721 generic.go:334] "Generic (PLEG): container finished" podID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerID="cf357cfb88d2e54bc8d0c47a266a5433480088d4912a8613985b306f90aeee27" exitCode=0 Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.971817 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-855fx" event={"ID":"60ea230f-98d6-4955-bf6a-71a91d65ff20","Type":"ContainerDied","Data":"cf357cfb88d2e54bc8d0c47a266a5433480088d4912a8613985b306f90aeee27"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.974984 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerStarted","Data":"74517c4563f1526bf657769a76bb8ca3079b1bf133236cbd484e73a03c9e6683"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.977145 4721 generic.go:334] "Generic (PLEG): container finished" podID="7fc18041-4adb-4ea5-ba2e-98c383aa4747" containerID="26e757815bbfe0a0f7a93391c873d274293f2d031cd2f90ab2655379b4d5850c" exitCode=0 Jan 30 21:20:37 crc kubenswrapper[4721]: 
I0130 21:20:37.977204 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" event={"ID":"7fc18041-4adb-4ea5-ba2e-98c383aa4747","Type":"ContainerDied","Data":"26e757815bbfe0a0f7a93391c873d274293f2d031cd2f90ab2655379b4d5850c"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.979796 4721 generic.go:334] "Generic (PLEG): container finished" podID="b7549d96-9e33-46b7-923d-55646fbdcfba" containerID="2eb0b88466271b4a9cd160894e4164cbcefde9f2bee903697ef219d1e6a0ef25" exitCode=0 Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.979918 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" event={"ID":"b7549d96-9e33-46b7-923d-55646fbdcfba","Type":"ContainerDied","Data":"2eb0b88466271b4a9cd160894e4164cbcefde9f2bee903697ef219d1e6a0ef25"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.982706 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42958" event={"ID":"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5","Type":"ContainerStarted","Data":"6583ab05257f1e388ac96fef9c9649b0eb34b342f7ae83bb64283c7ca78a6a67"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.985605 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerStarted","Data":"be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19"} Jan 30 21:20:37 crc kubenswrapper[4721]: I0130 21:20:37.989850 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerStarted","Data":"69a7e4800e1c5ff03b1a0ccc9acb6418e43ca9b0be6ce78868b813eff7c9dee3"} Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.129321 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-42958" podStartSLOduration=7.206688208 podStartE2EDuration="1m17.129289929s" podCreationTimestamp="2026-01-30 21:19:21 +0000 UTC" firstStartedPulling="2026-01-30 21:19:22.145820935 +0000 UTC m=+150.937722221" lastFinishedPulling="2026-01-30 21:20:32.068422666 +0000 UTC m=+220.860323942" observedRunningTime="2026-01-30 21:20:38.10261327 +0000 UTC m=+226.894514516" watchObservedRunningTime="2026-01-30 21:20:38.129289929 +0000 UTC m=+226.921191165" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.355894 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384153 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh"] Jan 30 21:20:38 crc kubenswrapper[4721]: E0130 21:20:38.384371 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="registry-server" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384384 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="registry-server" Jan 30 21:20:38 crc kubenswrapper[4721]: E0130 21:20:38.384396 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fc18041-4adb-4ea5-ba2e-98c383aa4747" containerName="route-controller-manager" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384402 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fc18041-4adb-4ea5-ba2e-98c383aa4747" containerName="route-controller-manager" Jan 30 21:20:38 crc kubenswrapper[4721]: E0130 21:20:38.384410 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="extract-utilities" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384417 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="extract-utilities" Jan 30 21:20:38 crc kubenswrapper[4721]: E0130 21:20:38.384433 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="extract-content" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384439 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="extract-content" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384532 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf260939-466c-4142-b8bf-63d2d9a526f2" containerName="registry-server" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384544 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fc18041-4adb-4ea5-ba2e-98c383aa4747" containerName="route-controller-manager" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.384934 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.396686 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh"] Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.415573 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-config\") pod \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.415728 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-client-ca\") pod \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.415780 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fc18041-4adb-4ea5-ba2e-98c383aa4747-serving-cert\") pod \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.415814 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7qgx\" (UniqueName: \"kubernetes.io/projected/7fc18041-4adb-4ea5-ba2e-98c383aa4747-kube-api-access-v7qgx\") pod \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\" (UID: \"7fc18041-4adb-4ea5-ba2e-98c383aa4747\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.416207 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-serving-cert\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.416251 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-config\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.416275 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-client-ca\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.416353 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crxr4\" (UniqueName: \"kubernetes.io/projected/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-kube-api-access-crxr4\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc 
kubenswrapper[4721]: I0130 21:20:38.418196 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-client-ca" (OuterVolumeSpecName: "client-ca") pod "7fc18041-4adb-4ea5-ba2e-98c383aa4747" (UID: "7fc18041-4adb-4ea5-ba2e-98c383aa4747"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.418309 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-config" (OuterVolumeSpecName: "config") pod "7fc18041-4adb-4ea5-ba2e-98c383aa4747" (UID: "7fc18041-4adb-4ea5-ba2e-98c383aa4747"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.424537 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fc18041-4adb-4ea5-ba2e-98c383aa4747-kube-api-access-v7qgx" (OuterVolumeSpecName: "kube-api-access-v7qgx") pod "7fc18041-4adb-4ea5-ba2e-98c383aa4747" (UID: "7fc18041-4adb-4ea5-ba2e-98c383aa4747"). InnerVolumeSpecName "kube-api-access-v7qgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.436685 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fc18041-4adb-4ea5-ba2e-98c383aa4747-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7fc18041-4adb-4ea5-ba2e-98c383aa4747" (UID: "7fc18041-4adb-4ea5-ba2e-98c383aa4747"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.517960 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crxr4\" (UniqueName: \"kubernetes.io/projected/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-kube-api-access-crxr4\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518044 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-serving-cert\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518076 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-config\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518099 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-client-ca\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518159 4721 reconciler_common.go:293] 
"Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518174 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fc18041-4adb-4ea5-ba2e-98c383aa4747-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518188 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7qgx\" (UniqueName: \"kubernetes.io/projected/7fc18041-4adb-4ea5-ba2e-98c383aa4747-kube-api-access-v7qgx\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.518202 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fc18041-4adb-4ea5-ba2e-98c383aa4747-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.521195 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-config\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.525222 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-client-ca\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.540356 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-serving-cert\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.542597 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crxr4\" (UniqueName: \"kubernetes.io/projected/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-kube-api-access-crxr4\") pod \"route-controller-manager-6874c7d55b-kwbsh\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.699112 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.840331 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.922746 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7549d96-9e33-46b7-923d-55646fbdcfba-serving-cert\") pod \"b7549d96-9e33-46b7-923d-55646fbdcfba\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.922863 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4v2m\" (UniqueName: \"kubernetes.io/projected/b7549d96-9e33-46b7-923d-55646fbdcfba-kube-api-access-c4v2m\") pod \"b7549d96-9e33-46b7-923d-55646fbdcfba\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.922916 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-proxy-ca-bundles\") pod \"b7549d96-9e33-46b7-923d-55646fbdcfba\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.922939 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-client-ca\") pod \"b7549d96-9e33-46b7-923d-55646fbdcfba\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.922989 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-config\") pod \"b7549d96-9e33-46b7-923d-55646fbdcfba\" (UID: \"b7549d96-9e33-46b7-923d-55646fbdcfba\") " Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.923967 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b7549d96-9e33-46b7-923d-55646fbdcfba" (UID: "b7549d96-9e33-46b7-923d-55646fbdcfba"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.924024 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-config" (OuterVolumeSpecName: "config") pod "b7549d96-9e33-46b7-923d-55646fbdcfba" (UID: "b7549d96-9e33-46b7-923d-55646fbdcfba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.924403 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-client-ca" (OuterVolumeSpecName: "client-ca") pod "b7549d96-9e33-46b7-923d-55646fbdcfba" (UID: "b7549d96-9e33-46b7-923d-55646fbdcfba"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.927112 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7549d96-9e33-46b7-923d-55646fbdcfba-kube-api-access-c4v2m" (OuterVolumeSpecName: "kube-api-access-c4v2m") pod "b7549d96-9e33-46b7-923d-55646fbdcfba" (UID: "b7549d96-9e33-46b7-923d-55646fbdcfba"). InnerVolumeSpecName "kube-api-access-c4v2m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.927220 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7549d96-9e33-46b7-923d-55646fbdcfba-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b7549d96-9e33-46b7-923d-55646fbdcfba" (UID: "b7549d96-9e33-46b7-923d-55646fbdcfba"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.996579 4721 generic.go:334] "Generic (PLEG): container finished" podID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerID="74517c4563f1526bf657769a76bb8ca3079b1bf133236cbd484e73a03c9e6683" exitCode=0 Jan 30 21:20:38 crc kubenswrapper[4721]: I0130 21:20:38.996619 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerDied","Data":"74517c4563f1526bf657769a76bb8ca3079b1bf133236cbd484e73a03c9e6683"} Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:38.998220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" event={"ID":"7fc18041-4adb-4ea5-ba2e-98c383aa4747","Type":"ContainerDied","Data":"5e8790d787d40fcd43b5acd79b907db4be411f0fd93124275e45afa55cc1e2dd"} Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:38.998254 4721 scope.go:117] "RemoveContainer" containerID="26e757815bbfe0a0f7a93391c873d274293f2d031cd2f90ab2655379b4d5850c" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:38.998322 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.000047 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.000081 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5869995894-mqpmh" event={"ID":"b7549d96-9e33-46b7-923d-55646fbdcfba","Type":"ContainerDied","Data":"11b03a8f4d8139c17ded29c760b02499d072812a1c75bdd35dde6a3ede9dad67"} Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.008765 4721 generic.go:334] "Generic (PLEG): container finished" podID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerID="be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19" exitCode=0 Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.008823 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerDied","Data":"be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19"} Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.012809 4721 generic.go:334] "Generic (PLEG): container finished" podID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerID="69a7e4800e1c5ff03b1a0ccc9acb6418e43ca9b0be6ce78868b813eff7c9dee3" exitCode=0 Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.012838 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerDied","Data":"69a7e4800e1c5ff03b1a0ccc9acb6418e43ca9b0be6ce78868b813eff7c9dee3"} Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.024446 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7549d96-9e33-46b7-923d-55646fbdcfba-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.024480 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4v2m\" (UniqueName: \"kubernetes.io/projected/b7549d96-9e33-46b7-923d-55646fbdcfba-kube-api-access-c4v2m\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.024490 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.024500 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.024510 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7549d96-9e33-46b7-923d-55646fbdcfba-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.066595 4721 scope.go:117] "RemoveContainer" containerID="2eb0b88466271b4a9cd160894e4164cbcefde9f2bee903697ef219d1e6a0ef25" Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.112703 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5869995894-mqpmh"] Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.119474 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5869995894-mqpmh"] Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.130506 4721 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh"] Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.139454 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd"] Jan 30 21:20:39 crc kubenswrapper[4721]: I0130 21:20:39.141653 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b9b5c9db-2ttkd"] Jan 30 21:20:39 crc kubenswrapper[4721]: W0130 21:20:39.171739 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b3e7a2e_35b4_44ea_9fd7_6ce7997e7398.slice/crio-1de2c37d22cdaccc1a9a4f00a09c66f9e7812a4a921f6d271af62a10fa98abbe WatchSource:0}: Error finding container 1de2c37d22cdaccc1a9a4f00a09c66f9e7812a4a921f6d271af62a10fa98abbe: Status 404 returned error can't find the container with id 1de2c37d22cdaccc1a9a4f00a09c66f9e7812a4a921f6d271af62a10fa98abbe Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.021341 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-855fx" event={"ID":"60ea230f-98d6-4955-bf6a-71a91d65ff20","Type":"ContainerStarted","Data":"123757439b6efa1e1508614c8f53c0d020d37bed52274a88202c211b6d05dada"} Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.024430 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" event={"ID":"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398","Type":"ContainerStarted","Data":"4abb0fb7353145ab59bab0ffdb7d371f50aeb5ebdfad9738c3565b31d4393fe6"} Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.024475 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" event={"ID":"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398","Type":"ContainerStarted","Data":"1de2c37d22cdaccc1a9a4f00a09c66f9e7812a4a921f6d271af62a10fa98abbe"} Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.024796 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.031415 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.043905 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-855fx" podStartSLOduration=5.220065702 podStartE2EDuration="1m17.043889214s" podCreationTimestamp="2026-01-30 21:19:23 +0000 UTC" firstStartedPulling="2026-01-30 21:19:27.403750585 +0000 UTC m=+156.195651831" lastFinishedPulling="2026-01-30 21:20:39.227574097 +0000 UTC m=+228.019475343" observedRunningTime="2026-01-30 21:20:40.0425238 +0000 UTC m=+228.834425046" watchObservedRunningTime="2026-01-30 21:20:40.043889214 +0000 UTC m=+228.835790460" Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.059517 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" podStartSLOduration=4.05949717 podStartE2EDuration="4.05949717s" podCreationTimestamp="2026-01-30 21:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:40.058431967 +0000 UTC m=+228.850333253" watchObservedRunningTime="2026-01-30 21:20:40.05949717 +0000 UTC m=+228.851398416" Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.101881 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fc18041-4adb-4ea5-ba2e-98c383aa4747" path="/var/lib/kubelet/pods/7fc18041-4adb-4ea5-ba2e-98c383aa4747/volumes" Jan 30 21:20:40 crc kubenswrapper[4721]: I0130 21:20:40.102422 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7549d96-9e33-46b7-923d-55646fbdcfba" path="/var/lib/kubelet/pods/b7549d96-9e33-46b7-923d-55646fbdcfba/volumes" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.030115 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4"] Jan 30 21:20:41 crc kubenswrapper[4721]: E0130 21:20:41.032072 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7549d96-9e33-46b7-923d-55646fbdcfba" containerName="controller-manager" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.032103 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7549d96-9e33-46b7-923d-55646fbdcfba" containerName="controller-manager" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.033129 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7549d96-9e33-46b7-923d-55646fbdcfba" containerName="controller-manager" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.034207 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.038520 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerStarted","Data":"890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0"} Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.043123 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.043472 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.043666 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.045605 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.046174 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.045901 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.051115 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerStarted","Data":"e3550519af866aad0d1314812f525603aca8d79e5eae408c426efbaeaf299ee1"} Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.061229 
4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.062352 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4"] Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.069455 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerStarted","Data":"893a14f1e75ca387cfd9eefcd6a7c3253bd9c383b5f22dd831b3ba8d9af07f93"} Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.111047 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b48x7" podStartSLOduration=4.002370016 podStartE2EDuration="1m18.111016361s" podCreationTimestamp="2026-01-30 21:19:23 +0000 UTC" firstStartedPulling="2026-01-30 21:19:26.392062716 +0000 UTC m=+155.183963962" lastFinishedPulling="2026-01-30 21:20:40.500709061 +0000 UTC m=+229.292610307" observedRunningTime="2026-01-30 21:20:41.097051737 +0000 UTC m=+229.888952983" watchObservedRunningTime="2026-01-30 21:20:41.111016361 +0000 UTC m=+229.902917607" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.119509 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9mhm6" podStartSLOduration=3.001745584 podStartE2EDuration="1m21.119491581s" podCreationTimestamp="2026-01-30 21:19:20 +0000 UTC" firstStartedPulling="2026-01-30 21:19:22.159075909 +0000 UTC m=+150.950977195" lastFinishedPulling="2026-01-30 21:20:40.276821946 +0000 UTC m=+229.068723192" observedRunningTime="2026-01-30 21:20:41.118794299 +0000 UTC m=+229.910695545" watchObservedRunningTime="2026-01-30 21:20:41.119491581 +0000 UTC m=+229.911392827" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.147013 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wrlqs" podStartSLOduration=3.202099317 podStartE2EDuration="1m21.146994086s" podCreationTimestamp="2026-01-30 21:19:20 +0000 UTC" firstStartedPulling="2026-01-30 21:19:22.146853828 +0000 UTC m=+150.938755074" lastFinishedPulling="2026-01-30 21:20:40.091748597 +0000 UTC m=+228.883649843" observedRunningTime="2026-01-30 21:20:41.14648637 +0000 UTC m=+229.938387616" watchObservedRunningTime="2026-01-30 21:20:41.146994086 +0000 UTC m=+229.938895332" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.149544 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67vns\" (UniqueName: \"kubernetes.io/projected/a02c0348-158b-4ef6-b3ed-cea47a4738ca-kube-api-access-67vns\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.149615 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-config\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.149652 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-client-ca\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.150286 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a02c0348-158b-4ef6-b3ed-cea47a4738ca-serving-cert\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.150358 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-proxy-ca-bundles\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.251771 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67vns\" (UniqueName: \"kubernetes.io/projected/a02c0348-158b-4ef6-b3ed-cea47a4738ca-kube-api-access-67vns\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.251913 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-config\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.251969 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-client-ca\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.252007 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a02c0348-158b-4ef6-b3ed-cea47a4738ca-serving-cert\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.252040 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-proxy-ca-bundles\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.254629 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-client-ca\") pod 
\"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.255921 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-config\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.256337 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-proxy-ca-bundles\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.261506 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a02c0348-158b-4ef6-b3ed-cea47a4738ca-serving-cert\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.281514 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67vns\" (UniqueName: \"kubernetes.io/projected/a02c0348-158b-4ef6-b3ed-cea47a4738ca-kube-api-access-67vns\") pod \"controller-manager-54fdd69f5d-jr7j4\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.298409 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.298491 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.365605 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.477443 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.478094 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.527291 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:20:41 crc kubenswrapper[4721]: I0130 21:20:41.802355 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4"] Jan 30 21:20:42 crc kubenswrapper[4721]: I0130 21:20:42.077464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" event={"ID":"a02c0348-158b-4ef6-b3ed-cea47a4738ca","Type":"ContainerStarted","Data":"3c07b34af40ed4b0cccb1002792bc698f91d4916a61a75cd9e8285f55ba7184d"} Jan 30 21:20:42 crc kubenswrapper[4721]: I0130 21:20:42.077521 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" event={"ID":"a02c0348-158b-4ef6-b3ed-cea47a4738ca","Type":"ContainerStarted","Data":"a837b052ab4f833cdd13ed22b5cb15fc800f96f8b3d678be80aae85a8776ad0f"} Jan 30 21:20:42 crc kubenswrapper[4721]: I0130 21:20:42.141403 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:20:42 crc kubenswrapper[4721]: I0130 21:20:42.344290 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wrlqs" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="registry-server" probeResult="failure" output=< Jan 30 21:20:42 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:20:42 crc kubenswrapper[4721]: > Jan 30 21:20:43 crc kubenswrapper[4721]: I0130 21:20:43.084649 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:43 crc kubenswrapper[4721]: I0130 21:20:43.091011 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:20:43 crc kubenswrapper[4721]: I0130 21:20:43.108483 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" podStartSLOduration=7.108459902 podStartE2EDuration="7.108459902s" podCreationTimestamp="2026-01-30 21:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:20:43.105734105 +0000 UTC m=+231.897635381" watchObservedRunningTime="2026-01-30 21:20:43.108459902 +0000 UTC m=+231.900361188" Jan 30 21:20:43 crc kubenswrapper[4721]: I0130 21:20:43.245723 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xltgq"] Jan 30 21:20:43 crc kubenswrapper[4721]: I0130 21:20:43.572376 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-855fx" Jan 30 21:20:43 crc 
kubenswrapper[4721]: I0130 21:20:43.573551 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-855fx" Jan 30 21:20:43 crc kubenswrapper[4721]: I0130 21:20:43.615324 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-855fx" Jan 30 21:20:44 crc kubenswrapper[4721]: I0130 21:20:44.074171 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:20:44 crc kubenswrapper[4721]: I0130 21:20:44.074245 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:20:44 crc kubenswrapper[4721]: I0130 21:20:44.149776 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-855fx" Jan 30 21:20:45 crc kubenswrapper[4721]: I0130 21:20:45.122665 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b48x7" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="registry-server" probeResult="failure" output=< Jan 30 21:20:45 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:20:45 crc kubenswrapper[4721]: > Jan 30 21:20:45 crc kubenswrapper[4721]: I0130 21:20:45.804038 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-42958"] Jan 30 21:20:45 crc kubenswrapper[4721]: I0130 21:20:45.805042 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-42958" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="registry-server" containerID="cri-o://6583ab05257f1e388ac96fef9c9649b0eb34b342f7ae83bb64283c7ca78a6a67" gracePeriod=2 Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.120343 4721 generic.go:334] "Generic (PLEG): container finished" podID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerID="6583ab05257f1e388ac96fef9c9649b0eb34b342f7ae83bb64283c7ca78a6a67" exitCode=0 Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.120431 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42958" event={"ID":"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5","Type":"ContainerDied","Data":"6583ab05257f1e388ac96fef9c9649b0eb34b342f7ae83bb64283c7ca78a6a67"} Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.199608 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.870959 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.871265 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="extract-utilities" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.871279 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="extract-utilities" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.871308 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="registry-server" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.871314 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="registry-server" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.871332 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="extract-content" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.871339 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="extract-content" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.871456 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" containerName="registry-server" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.871879 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.872158 4721 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.872834 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed" gracePeriod=15 Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.872843 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8" gracePeriod=15 Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.872874 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0" gracePeriod=15 Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.873878 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.872891 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" containerID="cri-o://15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae" gracePeriod=15 Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.872952 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686" gracePeriod=15 Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876751 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876801 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876828 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876836 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876848 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876855 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876863 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876869 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876884 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876890 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876905 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876911 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 21:20:46 crc kubenswrapper[4721]: E0130 21:20:46.876919 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.876925 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.877061 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.877082 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.877098 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.877112 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.877123 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.877374 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.887769 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-catalog-content\") pod \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.887861 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-utilities\") pod \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.887937 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5br7v\" (UniqueName: \"kubernetes.io/projected/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-kube-api-access-5br7v\") pod \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\" (UID: \"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5\") " Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.891887 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-utilities" (OuterVolumeSpecName: "utilities") pod "cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" (UID: "cc6f52ce-e313-4e62-8cd0-292c19d3cbc5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.930708 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.931835 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-kube-api-access-5br7v" (OuterVolumeSpecName: "kube-api-access-5br7v") pod "cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" (UID: "cc6f52ce-e313-4e62-8cd0-292c19d3cbc5"). InnerVolumeSpecName "kube-api-access-5br7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.949289 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" (UID: "cc6f52ce-e313-4e62-8cd0-292c19d3cbc5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992601 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992671 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992714 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992748 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992772 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992830 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992888 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992912 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992948 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992961 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:46 crc kubenswrapper[4721]: I0130 21:20:46.992972 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5br7v\" (UniqueName: \"kubernetes.io/projected/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5-kube-api-access-5br7v\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094454 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094508 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094544 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094629 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094656 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094670 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094623 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094623 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.094725 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095064 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095157 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095173 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095180 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095203 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.095268 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.136848 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.139002 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.141354 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0" exitCode=2 Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.144709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42958" event={"ID":"cc6f52ce-e313-4e62-8cd0-292c19d3cbc5","Type":"ContainerDied","Data":"865a730c88deb77807452c52ca260553a1f2f5cea536a223f53d77e7a71fce54"} Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.144773 4721 scope.go:117] "RemoveContainer" containerID="6583ab05257f1e388ac96fef9c9649b0eb34b342f7ae83bb64283c7ca78a6a67" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.144773 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-42958" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.146045 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.146449 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.146810 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.159612 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.160195 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:47 crc 
kubenswrapper[4721]: I0130 21:20:47.160727 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.167509 4721 scope.go:117] "RemoveContainer" containerID="c2b9a226a1e6f465b182fa69bd04f1bdf7b7c0c118b401b881b67e1bfd5ec739" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.196579 4721 scope.go:117] "RemoveContainer" containerID="46afa88203cf92c9c000901f08577d6b749b4593a6c2df120aa2ce2dcb95d854" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.219582 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:20:47 crc kubenswrapper[4721]: W0130 21:20:47.243928 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-30d327966c6e034e8dc65e4313871fb32e5343bc17707425e9b2997eb9cb8db4 WatchSource:0}: Error finding container 30d327966c6e034e8dc65e4313871fb32e5343bc17707425e9b2997eb9cb8db4: Status 404 returned error can't find the container with id 30d327966c6e034e8dc65e4313871fb32e5343bc17707425e9b2997eb9cb8db4 Jan 30 21:20:47 crc kubenswrapper[4721]: E0130 21:20:47.247570 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.20:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f9f0ac8c6b5f0 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 21:20:47.24684952 +0000 UTC m=+236.038750766,LastTimestamp:2026-01-30 21:20:47.24684952 +0000 UTC m=+236.038750766,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.835542 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 30 21:20:47 crc kubenswrapper[4721]: I0130 21:20:47.836040 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.155043 4721 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.156826 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.157865 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8" exitCode=0 Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.157914 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae" exitCode=0 Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.157935 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686" exitCode=0 Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.157949 4721 scope.go:117] "RemoveContainer" containerID="7e638f3ca8d15fb30cbd67098cbb3129ba70942c675b93f7ed2ffe009d7ca668" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.162976 4721 generic.go:334] "Generic (PLEG): container finished" podID="ab260955-261a-451a-ac81-8e359c0892ef" containerID="170c2c04fd1f1d20d69257aadac4bd9510ec5dd5039133e0373510675e3b8fe8" exitCode=0 Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.163094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ab260955-261a-451a-ac81-8e359c0892ef","Type":"ContainerDied","Data":"170c2c04fd1f1d20d69257aadac4bd9510ec5dd5039133e0373510675e3b8fe8"} Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.164082 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.164739 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.165286 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.166752 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"421b0c378590229eb94cc24e5d32cc918f846825564bc6e188a362a8315f4ea1"} Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.166801 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"30d327966c6e034e8dc65e4313871fb32e5343bc17707425e9b2997eb9cb8db4"} Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.168568 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.168949 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:48 crc kubenswrapper[4721]: I0130 21:20:48.169370 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.175396 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.568490 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.569724 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.570176 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.570726 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.738620 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-kubelet-dir\") pod \"ab260955-261a-451a-ac81-8e359c0892ef\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.738797 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ab260955-261a-451a-ac81-8e359c0892ef" (UID: "ab260955-261a-451a-ac81-8e359c0892ef"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.738826 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-var-lock\") pod \"ab260955-261a-451a-ac81-8e359c0892ef\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.738900 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-var-lock" (OuterVolumeSpecName: "var-lock") pod "ab260955-261a-451a-ac81-8e359c0892ef" (UID: "ab260955-261a-451a-ac81-8e359c0892ef"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.738956 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ab260955-261a-451a-ac81-8e359c0892ef-kube-api-access\") pod \"ab260955-261a-451a-ac81-8e359c0892ef\" (UID: \"ab260955-261a-451a-ac81-8e359c0892ef\") " Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.739356 4721 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-var-lock\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.739391 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ab260955-261a-451a-ac81-8e359c0892ef-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.750909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab260955-261a-451a-ac81-8e359c0892ef-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ab260955-261a-451a-ac81-8e359c0892ef" (UID: "ab260955-261a-451a-ac81-8e359c0892ef"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:20:49 crc kubenswrapper[4721]: I0130 21:20:49.840403 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ab260955-261a-451a-ac81-8e359c0892ef-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.187038 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ab260955-261a-451a-ac81-8e359c0892ef","Type":"ContainerDied","Data":"05f19d57707d6467f1ad0b90d4013a089c795fa3d1610fa0dd06b0c778dddd62"} Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.187104 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05f19d57707d6467f1ad0b90d4013a089c795fa3d1610fa0dd06b0c778dddd62" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.187187 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.192683 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.193086 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.193404 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:50 crc kubenswrapper[4721]: E0130 21:20:50.859899 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.20:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f9f0ac8c6b5f0 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 21:20:47.24684952 +0000 UTC m=+236.038750766,LastTimestamp:2026-01-30 21:20:47.24684952 +0000 UTC m=+236.038750766,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.869698 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.872186 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.933247 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.934022 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.934454 4721 status_manager.go:851] "Failed to get 
status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.934754 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:50 crc kubenswrapper[4721]: I0130 21:20:50.935066 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.200362 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.201460 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed" exitCode=0 Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.258008 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.258794 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.259818 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.260854 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.261550 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.345853 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.346704 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.347540 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.348056 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.348571 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.348930 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.419087 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.419733 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.420173 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.420853 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.421119 4721 status_manager.go:851] "Failed to get status for pod" 
podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.421513 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.761554 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.763634 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.764497 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.765136 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.765725 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.766217 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.766787 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.767252 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.868656 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.868834 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.868912 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.868955 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.869025 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.869200 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.869652 4721 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.869696 4721 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:51 crc kubenswrapper[4721]: I0130 21:20:51.869718 4721 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.095356 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.095531 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.095733 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.095951 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.096991 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.098386 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.120727 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.212931 4721 scope.go:117] "RemoveContainer" 
containerID="eebc48a84728d12e0dead42ae9689247ec8826cdfe88876e2f0b234c1d4663b8" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.213286 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.215610 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.216215 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.216754 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.217327 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.217865 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.218470 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.219107 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.219376 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.219611 4721 status_manager.go:851] 
"Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.219816 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.220042 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.220281 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.245777 4721 scope.go:117] "RemoveContainer" containerID="15da1e4c6559ecc2040ea1040ad6d46fddf92b4017686ba609d84759fad804ae" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.267159 4721 scope.go:117] "RemoveContainer" containerID="35aca5272462b097d06819499762fdc884f39cc1229db3e2afc1d02312b8a686" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.283474 4721 scope.go:117] "RemoveContainer" containerID="a1e00e6ead6250c0e017f0876947ae2c7bfc79615a971241e991f2436958bad0" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.307532 4721 scope.go:117] "RemoveContainer" containerID="6a889f5aeb8776e7f905914fedd24678cd03e048cf3f08b38e0ecfe852680eed" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.339671 4721 scope.go:117] "RemoveContainer" containerID="1e88d17eb2fd306b6965813c4434492b980b39656b0050ce4f6221a86f90752e" Jan 30 21:20:52 crc kubenswrapper[4721]: E0130 21:20:52.896691 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: E0130 21:20:52.897634 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: E0130 21:20:52.898170 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: E0130 21:20:52.898488 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 
21:20:52 crc kubenswrapper[4721]: E0130 21:20:52.898797 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:52 crc kubenswrapper[4721]: I0130 21:20:52.898827 4721 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 30 21:20:52 crc kubenswrapper[4721]: E0130 21:20:52.899254 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="200ms" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.100813 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="400ms" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.502345 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="800ms" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.556915 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:20:53Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:20:53Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:20:53Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T21:20:53Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.557525 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.557935 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.558478 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.558938 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:53 crc kubenswrapper[4721]: E0130 21:20:53.558970 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.139617 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.140955 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.141922 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.142638 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.143285 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.144108 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.144634 4721 status_manager.go:851] "Failed to get status for pod" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.209856 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.211435 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" 
pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.212175 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.212872 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.213652 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.214140 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: I0130 21:20:54.214677 4721 status_manager.go:851] "Failed to get status for pod" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:54 crc kubenswrapper[4721]: E0130 21:20:54.303553 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="1.6s" Jan 30 21:20:55 crc kubenswrapper[4721]: E0130 21:20:55.904925 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="3.2s" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.091662 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.094872 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.095518 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.096609 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.097407 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.097913 4721 status_manager.go:851] "Failed to get status for pod" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.098389 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:20:59 crc kubenswrapper[4721]: E0130 21:20:59.106066 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.20:6443: connect: connection refused" interval="6.4s" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.120066 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.120095 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:20:59 crc kubenswrapper[4721]: E0130 21:20:59.120471 4721 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 
21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.121127 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:20:59 crc kubenswrapper[4721]: I0130 21:20:59.268924 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"697c91def7ff77ab1b1d0fc71d1c8144f2838eadacab53e435190c52a7833614"} Jan 30 21:21:00 crc kubenswrapper[4721]: E0130 21:21:00.862155 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.20:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f9f0ac8c6b5f0 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 21:20:47.24684952 +0000 UTC m=+236.038750766,LastTimestamp:2026-01-30 21:20:47.24684952 +0000 UTC m=+236.038750766,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.289692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8cb0646d4c8dc5ce93e40b5af7c64607589c1014b40eca9b6a66b01fbc1d8aee"} Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.295210 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.295291 4721 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7" exitCode=1 Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.295393 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7"} Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.296062 4721 scope.go:117] "RemoveContainer" containerID="025e2f37544b3f025172b3434ee3562b9a750902dc66e7e5f1d0aa17065018d7" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.296686 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.297365 4721 status_manager.go:851] "Failed to get status for pod" 
podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.297775 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.298342 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.298968 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.299765 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:01 crc kubenswrapper[4721]: I0130 21:21:01.300342 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.097880 4721 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.098573 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.098887 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused" Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.099081 4721 
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.099267 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.099489 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.099675 4721 status_manager.go:851] "Failed to get status for pod" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.100030 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.302686 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.302727 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9"
Jan 30 21:21:02 crc kubenswrapper[4721]: E0130 21:21:02.303375 4721 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.304015 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.304755 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.305423 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.305836 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.306171 4721 status_manager.go:851] "Failed to get status for pod" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.306491 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.306813 4721 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.307210 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:02 crc kubenswrapper[4721]: I0130 21:21:02.538509 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.312950 4721 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="8cb0646d4c8dc5ce93e40b5af7c64607589c1014b40eca9b6a66b01fbc1d8aee" exitCode=0
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.313107 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"8cb0646d4c8dc5ce93e40b5af7c64607589c1014b40eca9b6a66b01fbc1d8aee"}
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.314015 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.314040 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9"
Jan 30 21:21:03 crc kubenswrapper[4721]: E0130 21:21:03.314749 4721 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.314845 4721 status_manager.go:851] "Failed to get status for pod" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" pod="openshift-marketplace/certified-operators-42958" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-42958\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.315249 4721 status_manager.go:851] "Failed to get status for pod" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" pod="openshift-marketplace/community-operators-wrlqs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-wrlqs\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.315490 4721 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.315697 4721 status_manager.go:851] "Failed to get status for pod" podUID="ab260955-261a-451a-ac81-8e359c0892ef" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.315939 4721 status_manager.go:851] "Failed to get status for pod" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" pod="openshift-marketplace/redhat-operators-b48x7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-b48x7\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.316271 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.319810 4721 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:03 crc kubenswrapper[4721]: I0130 21:21:03.320154 4721 status_manager.go:851] "Failed to get status for pod" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" pod="openshift-marketplace/community-operators-9mhm6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-9mhm6\": dial tcp 38.102.83.20:6443: connect: connection refused"
Jan 30 21:21:04 crc kubenswrapper[4721]: I0130 21:21:04.063951 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 21:21:04 crc kubenswrapper[4721]: I0130 21:21:04.321207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"038eb8ecb414517fc8be9823d1fa00b9e13e5f47a13643da549670237c176976"}
Jan 30 21:21:04 crc kubenswrapper[4721]: I0130 21:21:04.325225 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 30 21:21:04 crc kubenswrapper[4721]: I0130 21:21:04.325310 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e8ba0a5f5ae26ec36007958c9f1936c4390327196796f5c380de1596ad6a5282"}
Jan 30 21:21:05 crc kubenswrapper[4721]: I0130 21:21:05.108746 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 21:21:05 crc kubenswrapper[4721]: I0130 21:21:05.332932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dcf6660bb87df026721b332426c7289372dc9da3df2104bc153cbcab0eb8f7ca"}
Jan 30 21:21:05 crc kubenswrapper[4721]: I0130 21:21:05.332971 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f9aac0566b05be52a0001a43b665cffb0bf0adae67429ecde89ac5183610016a"}
Jan 30 21:21:05 crc kubenswrapper[4721]: I0130 21:21:05.332982 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9ef9550bf3d0662e6abaadfa4aaab6785471145eea68c017d4fa8165534c3edd"}
Jan 30 21:21:06 crc kubenswrapper[4721]: I0130 21:21:06.341538 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bd9ba1c78d47644366c25c29cdf8b81ba3693f9cb32312bacab27039e6f9c637"}
Jan 30 21:21:06 crc kubenswrapper[4721]: I0130 21:21:06.341962 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9"
Jan 30 21:21:06 crc kubenswrapper[4721]: I0130 21:21:06.341983 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9"
Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.274783 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" podUID="aa939e70-7c23-478e-9fca-ac0632a1295a" containerName="oauth-openshift" containerID="cri-o://972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84" gracePeriod=15
Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.732672 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq"
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836137 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-cliconfig\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836180 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-dir\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836229 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-idp-0-file-data\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836275 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-router-certs\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836386 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-serving-cert\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836440 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-session\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836493 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxc7l\" (UniqueName: \"kubernetes.io/projected/aa939e70-7c23-478e-9fca-ac0632a1295a-kube-api-access-xxc7l\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836541 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-trusted-ca-bundle\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836561 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-service-ca\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 
21:21:08.836578 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-login\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836606 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-error\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836636 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-policies\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836660 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-ocp-branding-template\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.836678 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-provider-selection\") pod \"aa939e70-7c23-478e-9fca-ac0632a1295a\" (UID: \"aa939e70-7c23-478e-9fca-ac0632a1295a\") " Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.837657 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.838674 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.838752 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.838848 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.838858 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.844429 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa939e70-7c23-478e-9fca-ac0632a1295a-kube-api-access-xxc7l" (OuterVolumeSpecName: "kube-api-access-xxc7l") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "kube-api-access-xxc7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.844906 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.845385 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.846074 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.846660 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.846853 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.847200 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.850279 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.851614 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "aa939e70-7c23-478e-9fca-ac0632a1295a" (UID: "aa939e70-7c23-478e-9fca-ac0632a1295a"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.938509 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.938869 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939022 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939123 4721 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939210 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939351 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939472 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939580 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939707 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxc7l\" (UniqueName: \"kubernetes.io/projected/aa939e70-7c23-478e-9fca-ac0632a1295a-kube-api-access-xxc7l\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939825 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.939920 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.940010 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.940091 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa939e70-7c23-478e-9fca-ac0632a1295a-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:08 crc kubenswrapper[4721]: I0130 21:21:08.940181 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa939e70-7c23-478e-9fca-ac0632a1295a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.122060 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.122715 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.122924 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.128800 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.367224 4721 generic.go:334] "Generic (PLEG): container finished" podID="aa939e70-7c23-478e-9fca-ac0632a1295a" containerID="972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84" exitCode=0 Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.367266 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" event={"ID":"aa939e70-7c23-478e-9fca-ac0632a1295a","Type":"ContainerDied","Data":"972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84"} Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.367332 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" event={"ID":"aa939e70-7c23-478e-9fca-ac0632a1295a","Type":"ContainerDied","Data":"ab36736f1b81bc7d068b808c9813f2700bd4503ce24610136455262e81b24715"} Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.367350 4721 scope.go:117] "RemoveContainer" containerID="972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.367515 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xltgq" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.391632 4721 scope.go:117] "RemoveContainer" containerID="972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84" Jan 30 21:21:09 crc kubenswrapper[4721]: E0130 21:21:09.392542 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84\": container with ID starting with 972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84 not found: ID does not exist" containerID="972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84" Jan 30 21:21:09 crc kubenswrapper[4721]: I0130 21:21:09.392577 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84"} err="failed to get container status \"972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84\": rpc error: code = NotFound desc = could not find container \"972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84\": container with ID starting with 972074ae790308de5b403f0086ac16d51973de07a522591bac71a57993042e84 not found: ID does not exist" Jan 30 21:21:11 crc kubenswrapper[4721]: I0130 21:21:11.350477 4721 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:11 crc kubenswrapper[4721]: E0130 21:21:11.367252 4721 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Jan 30 21:21:11 crc kubenswrapper[4721]: I0130 21:21:11.381751 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:21:11 crc kubenswrapper[4721]: I0130 21:21:11.381791 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:21:11 crc kubenswrapper[4721]: I0130 21:21:11.388755 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:11 crc kubenswrapper[4721]: I0130 21:21:11.393883 4721 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="129d8782-3c22-4096-ab9e-337ea723254e" Jan 30 21:21:12 crc kubenswrapper[4721]: I0130 21:21:12.386781 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:21:12 crc kubenswrapper[4721]: I0130 21:21:12.386820 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:21:12 crc kubenswrapper[4721]: I0130 21:21:12.538291 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 21:21:12 crc kubenswrapper[4721]: I0130 21:21:12.538502 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial 
tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 30 21:21:12 crc kubenswrapper[4721]: I0130 21:21:12.538541 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 30 21:21:19 crc kubenswrapper[4721]: I0130 21:21:19.126237 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 21:21:19 crc kubenswrapper[4721]: I0130 21:21:19.127171 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:21:19 crc kubenswrapper[4721]: I0130 21:21:19.127189 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6849017-7980-4ba5-a79f-e4f9949309b9" Jan 30 21:21:19 crc kubenswrapper[4721]: I0130 21:21:19.676744 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 30 21:21:19 crc kubenswrapper[4721]: I0130 21:21:19.734626 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 30 21:21:20 crc kubenswrapper[4721]: I0130 21:21:20.459654 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 30 21:21:20 crc kubenswrapper[4721]: I0130 21:21:20.474155 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 30 21:21:21 crc kubenswrapper[4721]: I0130 21:21:21.472138 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 30 21:21:21 crc kubenswrapper[4721]: I0130 21:21:21.937148 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 21:21:21 crc kubenswrapper[4721]: I0130 21:21:21.982683 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 30 21:21:21 crc kubenswrapper[4721]: I0130 21:21:21.996927 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.068778 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.083913 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.123671 4721 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="129d8782-3c22-4096-ab9e-337ea723254e" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.219786 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.455037 4721 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.525346 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.539132 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.539179 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.553921 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.613777 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.916781 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.968703 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 30 21:21:22 crc kubenswrapper[4721]: I0130 21:21:22.998670 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.123025 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.141354 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.177107 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.227080 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.243706 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.312786 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.328713 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.523152 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" 
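The kube-controller-manager startup probe above keeps failing because nothing is listening yet on 192.168.126.11:10257. A hedged way to reproduce the same GET /healthz by hand while diagnosing a node in this state (the endpoint and timeout mirror the log; certificate verification is skipped purely for illustration, since the probe endpoint serves a self-signed cert):

# Sketch: re-run the startup probe's health check manually.
import ssl, urllib.request

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE  # illustration only: skip cert verification

try:
    with urllib.request.urlopen("https://192.168.126.11:10257/healthz",
                                timeout=1, context=ctx) as resp:
        print(resp.status, resp.read().decode())
except OSError as exc:  # connection refused, timeout, TLS errors
    print("probe failed:", exc)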
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.546047 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.548225 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.560735 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.596208 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.631486 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.663864 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.693453 4721 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.714174 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.716433 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.754429 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.793650 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.813539 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.881239 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.907182 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.913641 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 30 21:21:23 crc kubenswrapper[4721]: I0130 21:21:23.954221 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.023171 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.123432 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.137546 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.301399 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.399965 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.497978 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.561916 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.582020 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.604141 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.641464 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.678521 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.809413 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.829929 4721 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.895651 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 30 21:21:24 crc kubenswrapper[4721]: I0130 21:21:24.965409 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.051842 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.096673 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.120677 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.135039 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.235137 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.273516 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.352469 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.357404 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.417570 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.461504 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.544683 4721 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.548187 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.550144 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.562640 4721 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.566224 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=39.566207035 podStartE2EDuration="39.566207035s" podCreationTimestamp="2026-01-30 21:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:10.390960369 +0000 UTC m=+259.182861615" watchObservedRunningTime="2026-01-30 21:21:25.566207035 +0000 UTC m=+274.358108301"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.568149 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-42958","openshift-authentication/oauth-openshift-558db77b4-xltgq"]
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.568234 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.576837 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.592160 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=14.592139807 podStartE2EDuration="14.592139807s" podCreationTimestamp="2026-01-30 21:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:25.589083059 +0000 UTC m=+274.380984305" watchObservedRunningTime="2026-01-30 21:21:25.592139807 +0000 UTC m=+274.384041053"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.627160 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.652518 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
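The pod_startup_latency_tracker entries above carry the SLO-relevant figures inline (podStartSLOduration, podStartE2EDuration, creation and observed-running timestamps). A short sketch extracting them from a log like this one, again assuming a kubelet.log path:

# Sketch: pull pod startup SLO figures out of the
# pod_startup_latency_tracker entries above. Field names match the log.
import re

LAT = re.compile(
    r'Observed pod startup duration" pod="(?P<pod>[^"]+)"'
    r' podStartSLOduration=(?P<slo>[\d.]+)'
    r' podStartE2EDuration="(?P<e2e>[^"]+)"'
)

for line in open("kubelet.log"):  # hypothetical path
    if (m := LAT.search(line)):
        print(f'{m["pod"]}: SLO {m["slo"]}s, end-to-end {m["e2e"]}')

# Against the entries above this prints 39.566207035s for the
# kube-apiserver startup monitor and 14.592139807s for kube-apiserver-crc.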
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.684033 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.706267 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.740087 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.853322 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 30 21:21:25 crc kubenswrapper[4721]: I0130 21:21:25.942262 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.040513 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.098184 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa939e70-7c23-478e-9fca-ac0632a1295a" path="/var/lib/kubelet/pods/aa939e70-7c23-478e-9fca-ac0632a1295a/volumes"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.098854 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.098959 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc6f52ce-e313-4e62-8cd0-292c19d3cbc5" path="/var/lib/kubelet/pods/cc6f52ce-e313-4e62-8cd0-292c19d3cbc5/volumes"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.164110 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.222900 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.262830 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.329472 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.451775 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.572416 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.585761 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.620021 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.634424 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.656346 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.665162 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.672605 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.678173 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.809071 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 30 21:21:26 crc kubenswrapper[4721]: I0130 21:21:26.815224 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.031397 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.127187 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.246721 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.314935 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.340209 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-984c8fd85-tp67m"]
Jan 30 21:21:27 crc kubenswrapper[4721]: E0130 21:21:27.340467 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab260955-261a-451a-ac81-8e359c0892ef" containerName="installer"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.340485 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab260955-261a-451a-ac81-8e359c0892ef" containerName="installer"
Jan 30 21:21:27 crc kubenswrapper[4721]: E0130 21:21:27.340504 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa939e70-7c23-478e-9fca-ac0632a1295a" containerName="oauth-openshift"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.340511 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa939e70-7c23-478e-9fca-ac0632a1295a" containerName="oauth-openshift"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.340604 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab260955-261a-451a-ac81-8e359c0892ef" containerName="installer"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.340620 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa939e70-7c23-478e-9fca-ac0632a1295a" containerName="oauth-openshift"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.341021 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.343705 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.343884 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.344009 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.344158 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.344364 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.344887 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.345016 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.345344 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.345527 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.346164 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.346672 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.346984 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.353098 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.353156 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.355356 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.359355 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.359908 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.360780 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-984c8fd85-tp67m"]
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.384611 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.390841 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425472 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425739 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425781 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-audit-policies\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425807 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-router-certs\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425852 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425882 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-error\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425902 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvxq2\" (UniqueName: \"kubernetes.io/projected/71211718-6092-4977-b735-ee91a2fa3059-kube-api-access-wvxq2\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425936 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71211718-6092-4977-b735-ee91a2fa3059-audit-dir\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.425992 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.426024 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-serving-cert\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.426075 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-login\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.426107 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-session\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.426139 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-service-ca\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.426163 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-cliconfig\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.464003 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.527806 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528160 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528266 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-audit-policies\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528382 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-router-certs\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528495 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528576 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-error\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvxq2\" (UniqueName: \"kubernetes.io/projected/71211718-6092-4977-b735-ee91a2fa3059-kube-api-access-wvxq2\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528796 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71211718-6092-4977-b735-ee91a2fa3059-audit-dir\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528861 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71211718-6092-4977-b735-ee91a2fa3059-audit-dir\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.528990 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529096 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-serving-cert\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529180 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-login\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529269 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-session\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529405 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-audit-policies\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529446 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-service-ca\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529898 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.529907 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-cliconfig\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.530139 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-service-ca\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.530428 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-cliconfig\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.534151 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-error\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.534256 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-router-certs\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.534317 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-serving-cert\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.534287 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.534565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
\"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.535764 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-user-template-login\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.537537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/71211718-6092-4977-b735-ee91a2fa3059-v4-0-config-system-session\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.544355 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvxq2\" (UniqueName: \"kubernetes.io/projected/71211718-6092-4977-b735-ee91a2fa3059-kube-api-access-wvxq2\") pod \"oauth-openshift-984c8fd85-tp67m\" (UID: \"71211718-6092-4977-b735-ee91a2fa3059\") " pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.547865 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.618084 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.664054 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.683877 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.685794 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.788263 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.983786 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.990024 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 30 21:21:27 crc kubenswrapper[4721]: I0130 21:21:27.996600 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.047398 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.064270 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.167148 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.231100 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.242892 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.265779 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.290560 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.344246 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.362929 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.414787 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.435313 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.450637 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.529861 4721 
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.655921 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.668378 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.682592 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.722389 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.731591 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.735440 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.773406 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.864492 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.877069 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.909287 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.915157 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.945965 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 30 21:21:28 crc kubenswrapper[4721]: I0130 21:21:28.982656 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.227990 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.269141 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.271502 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.285402 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-984c8fd85-tp67m"]
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.336915 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.466343 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.478916 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.486531 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" event={"ID":"71211718-6092-4977-b735-ee91a2fa3059","Type":"ContainerStarted","Data":"50bb392d7c1d065ced3736393498f7f1f264e5d6acd55e36a758d78bc079d224"}
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.603188 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.745895 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.798427 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.822890 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.868075 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.966611 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Jan 30 21:21:29 crc kubenswrapper[4721]: I0130 21:21:29.973367 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.013475 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.088593 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.234374 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.248467 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.289688 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.341858 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.343021 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.403199 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.449535 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.473819 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.492331 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" event={"ID":"71211718-6092-4977-b735-ee91a2fa3059","Type":"ContainerStarted","Data":"f9c531cd801795a2615ff214c6bcff29789d367444fe82f2051874e872899982"}
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.492972 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.499045 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.510966 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.519132 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-984c8fd85-tp67m" podStartSLOduration=47.519116458 podStartE2EDuration="47.519116458s" podCreationTimestamp="2026-01-30 21:20:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:30.515919896 +0000 UTC m=+279.307821172" watchObservedRunningTime="2026-01-30 21:21:30.519116458 +0000 UTC m=+279.311017704"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.697980 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.742111 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.786758 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.792853 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.817395 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.906592 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.926877 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 30 21:21:30 crc kubenswrapper[4721]: I0130 21:21:30.952788 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 30 21:21:31 crc kubenswrapper[4721]: I0130 21:21:31.037494 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 30 21:21:31 crc kubenswrapper[4721]: I0130 21:21:31.402318 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 30 21:21:31 crc kubenswrapper[4721]: I0130 21:21:31.561552 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Jan 30 21:21:31 crc kubenswrapper[4721]: I0130 21:21:31.697543 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 30 21:21:31 crc kubenswrapper[4721]: I0130 21:21:31.861437 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.005562 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.076370 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.232207 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.240749 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.250144 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.323789 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.345285 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.362123 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.418337 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.469474 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.544094 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.549055 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.566760 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.607925 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.757733 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.790293 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.822377 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 30 21:21:32 crc kubenswrapper[4721]: I0130 21:21:32.994783 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.151888 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.180823 4721 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.181080 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://421b0c378590229eb94cc24e5d32cc918f846825564bc6e188a362a8315f4ea1" gracePeriod=5 Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.364225 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.369556 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.423121 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.493458 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.625955 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.697532 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.728502 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 30 21:21:33 crc kubenswrapper[4721]: I0130 21:21:33.906907 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.179152 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.268028 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.274138 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.315997 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.388870 4721 reflector.go:368] Caches 
populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.426979 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.571083 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.676208 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 30 21:21:34 crc kubenswrapper[4721]: I0130 21:21:34.968585 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.081517 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.233347 4721 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.463828 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.713849 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.762883 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.783902 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.807532 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.852701 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.916192 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 30 21:21:35 crc kubenswrapper[4721]: I0130 21:21:35.919887 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 30 21:21:36 crc kubenswrapper[4721]: I0130 21:21:36.720423 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.568374 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.568472 4721 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="421b0c378590229eb94cc24e5d32cc918f846825564bc6e188a362a8315f4ea1" exitCode=137 Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.780136 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.780413 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926247 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926393 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926442 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926492 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926500 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926592 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926647 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926667 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926950 4721 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926976 4721 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.926988 4721 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.927000 4721 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:38 crc kubenswrapper[4721]: I0130 21:21:38.934037 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:21:39 crc kubenswrapper[4721]: I0130 21:21:39.028403 4721 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:39 crc kubenswrapper[4721]: I0130 21:21:39.575444 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 21:21:39 crc kubenswrapper[4721]: I0130 21:21:39.575788 4721 scope.go:117] "RemoveContainer" containerID="421b0c378590229eb94cc24e5d32cc918f846825564bc6e188a362a8315f4ea1" Jan 30 21:21:39 crc kubenswrapper[4721]: I0130 21:21:39.575846 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 21:21:40 crc kubenswrapper[4721]: I0130 21:21:40.099909 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 30 21:21:40 crc kubenswrapper[4721]: I0130 21:21:40.100175 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 30 21:21:40 crc kubenswrapper[4721]: I0130 21:21:40.110063 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 21:21:40 crc kubenswrapper[4721]: I0130 21:21:40.110105 4721 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="ac8d6758-cf6c-49e7-ae4a-c10f47aca629" Jan 30 21:21:40 crc kubenswrapper[4721]: I0130 21:21:40.113163 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 21:21:40 crc kubenswrapper[4721]: I0130 21:21:40.113187 4721 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="ac8d6758-cf6c-49e7-ae4a-c10f47aca629" Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.459457 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4"] Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.459744 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" podUID="a02c0348-158b-4ef6-b3ed-cea47a4738ca" containerName="controller-manager" containerID="cri-o://3c07b34af40ed4b0cccb1002792bc698f91d4916a61a75cd9e8285f55ba7184d" gracePeriod=30 Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.470652 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh"] Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.470951 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" podUID="7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" containerName="route-controller-manager" containerID="cri-o://4abb0fb7353145ab59bab0ffdb7d371f50aeb5ebdfad9738c3565b31d4393fe6" gracePeriod=30 Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.626327 4721 generic.go:334] "Generic (PLEG): container finished" podID="a02c0348-158b-4ef6-b3ed-cea47a4738ca" containerID="3c07b34af40ed4b0cccb1002792bc698f91d4916a61a75cd9e8285f55ba7184d" exitCode=0 Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.626680 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" event={"ID":"a02c0348-158b-4ef6-b3ed-cea47a4738ca","Type":"ContainerDied","Data":"3c07b34af40ed4b0cccb1002792bc698f91d4916a61a75cd9e8285f55ba7184d"} Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.628188 4721 generic.go:334] "Generic (PLEG): container finished" podID="7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" containerID="4abb0fb7353145ab59bab0ffdb7d371f50aeb5ebdfad9738c3565b31d4393fe6" exitCode=0 Jan 30 21:21:43 crc kubenswrapper[4721]: I0130 21:21:43.628212 4721 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" event={"ID":"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398","Type":"ContainerDied","Data":"4abb0fb7353145ab59bab0ffdb7d371f50aeb5ebdfad9738c3565b31d4393fe6"} Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.067624 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.073774 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224401 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-serving-cert\") pod \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224452 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-proxy-ca-bundles\") pod \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224475 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67vns\" (UniqueName: \"kubernetes.io/projected/a02c0348-158b-4ef6-b3ed-cea47a4738ca-kube-api-access-67vns\") pod \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224509 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a02c0348-158b-4ef6-b3ed-cea47a4738ca-serving-cert\") pod \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224548 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-client-ca\") pod \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224573 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-client-ca\") pod \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224631 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-config\") pod \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\" (UID: \"a02c0348-158b-4ef6-b3ed-cea47a4738ca\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224666 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-config\") pod \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.224690 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crxr4\" (UniqueName: \"kubernetes.io/projected/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-kube-api-access-crxr4\") pod \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\" (UID: \"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398\") " Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.225617 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-client-ca" (OuterVolumeSpecName: "client-ca") pod "a02c0348-158b-4ef6-b3ed-cea47a4738ca" (UID: "a02c0348-158b-4ef6-b3ed-cea47a4738ca"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.225746 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-config" (OuterVolumeSpecName: "config") pod "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" (UID: "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.225979 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-client-ca" (OuterVolumeSpecName: "client-ca") pod "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" (UID: "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.226212 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-config" (OuterVolumeSpecName: "config") pod "a02c0348-158b-4ef6-b3ed-cea47a4738ca" (UID: "a02c0348-158b-4ef6-b3ed-cea47a4738ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.226373 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.226401 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.226417 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.226711 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a02c0348-158b-4ef6-b3ed-cea47a4738ca" (UID: "a02c0348-158b-4ef6-b3ed-cea47a4738ca"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.230564 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02c0348-158b-4ef6-b3ed-cea47a4738ca-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a02c0348-158b-4ef6-b3ed-cea47a4738ca" (UID: "a02c0348-158b-4ef6-b3ed-cea47a4738ca"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.234135 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a02c0348-158b-4ef6-b3ed-cea47a4738ca-kube-api-access-67vns" (OuterVolumeSpecName: "kube-api-access-67vns") pod "a02c0348-158b-4ef6-b3ed-cea47a4738ca" (UID: "a02c0348-158b-4ef6-b3ed-cea47a4738ca"). InnerVolumeSpecName "kube-api-access-67vns". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.234278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-kube-api-access-crxr4" (OuterVolumeSpecName: "kube-api-access-crxr4") pod "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" (UID: "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398"). InnerVolumeSpecName "kube-api-access-crxr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.234392 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" (UID: "7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.327717 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a02c0348-158b-4ef6-b3ed-cea47a4738ca-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.327778 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.327819 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crxr4\" (UniqueName: \"kubernetes.io/projected/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-kube-api-access-crxr4\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.327845 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.327864 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a02c0348-158b-4ef6-b3ed-cea47a4738ca-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.327883 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67vns\" (UniqueName: \"kubernetes.io/projected/a02c0348-158b-4ef6-b3ed-cea47a4738ca-kube-api-access-67vns\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.637218 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" event={"ID":"7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398","Type":"ContainerDied","Data":"1de2c37d22cdaccc1a9a4f00a09c66f9e7812a4a921f6d271af62a10fa98abbe"} Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.637245 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.637276 4721 scope.go:117] "RemoveContainer" containerID="4abb0fb7353145ab59bab0ffdb7d371f50aeb5ebdfad9738c3565b31d4393fe6" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.640207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" event={"ID":"a02c0348-158b-4ef6-b3ed-cea47a4738ca","Type":"ContainerDied","Data":"a837b052ab4f833cdd13ed22b5cb15fc800f96f8b3d678be80aae85a8776ad0f"} Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.640277 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.661827 4721 scope.go:117] "RemoveContainer" containerID="3c07b34af40ed4b0cccb1002792bc698f91d4916a61a75cd9e8285f55ba7184d" Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.674829 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh"] Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.681561 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6874c7d55b-kwbsh"] Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.689525 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4"] Jan 30 21:21:44 crc kubenswrapper[4721]: I0130 21:21:44.692467 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-54fdd69f5d-jr7j4"] Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076562 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q"] Jan 30 21:21:45 crc kubenswrapper[4721]: E0130 21:21:45.076771 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076785 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 21:21:45 crc kubenswrapper[4721]: E0130 21:21:45.076804 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02c0348-158b-4ef6-b3ed-cea47a4738ca" containerName="controller-manager" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076810 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02c0348-158b-4ef6-b3ed-cea47a4738ca" containerName="controller-manager" Jan 30 21:21:45 crc kubenswrapper[4721]: E0130 21:21:45.076820 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" containerName="route-controller-manager" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076826 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" containerName="route-controller-manager" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076921 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="a02c0348-158b-4ef6-b3ed-cea47a4738ca" containerName="controller-manager" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076935 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.076945 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" containerName="route-controller-manager" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.077307 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.083486 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.083591 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.083657 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.085050 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.085495 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.087507 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.089917 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-99fz7"] Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.090711 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.095767 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.095882 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.095945 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.095985 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.096174 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.097041 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.105939 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q"] Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.105988 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.123062 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-99fz7"] Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.239742 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-proxy-ca-bundles\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.239811 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b03af70-b38b-4b01-b80a-2b0f39c63906-serving-cert\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.240005 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-client-ca\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.240112 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vxkx\" (UniqueName: \"kubernetes.io/projected/1b03af70-b38b-4b01-b80a-2b0f39c63906-kube-api-access-8vxkx\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " 
pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.241093 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmcqp\" (UniqueName: \"kubernetes.io/projected/1b87678b-4009-4707-bd9f-b3d18f08d90b-kube-api-access-qmcqp\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.241184 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-config\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.241233 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-client-ca\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.241328 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b87678b-4009-4707-bd9f-b3d18f08d90b-serving-cert\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.241643 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-config\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343379 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-client-ca\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343467 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vxkx\" (UniqueName: \"kubernetes.io/projected/1b03af70-b38b-4b01-b80a-2b0f39c63906-kube-api-access-8vxkx\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343517 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmcqp\" (UniqueName: \"kubernetes.io/projected/1b87678b-4009-4707-bd9f-b3d18f08d90b-kube-api-access-qmcqp\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " 
pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343536 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-config\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343561 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-client-ca\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343580 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b87678b-4009-4707-bd9f-b3d18f08d90b-serving-cert\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-config\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343634 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-proxy-ca-bundles\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.343653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b03af70-b38b-4b01-b80a-2b0f39c63906-serving-cert\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.346070 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-config\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.347092 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-proxy-ca-bundles\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.347559 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-client-ca\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.348220 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-client-ca\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.349018 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-config\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.350027 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b03af70-b38b-4b01-b80a-2b0f39c63906-serving-cert\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.350980 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b87678b-4009-4707-bd9f-b3d18f08d90b-serving-cert\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.359644 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vxkx\" (UniqueName: \"kubernetes.io/projected/1b03af70-b38b-4b01-b80a-2b0f39c63906-kube-api-access-8vxkx\") pod \"route-controller-manager-5b7c4ffcb5-9m22q\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.361266 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmcqp\" (UniqueName: \"kubernetes.io/projected/1b87678b-4009-4707-bd9f-b3d18f08d90b-kube-api-access-qmcqp\") pod \"controller-manager-65cd7b5985-99fz7\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.396160 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.406363 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.665006 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-99fz7"] Jan 30 21:21:45 crc kubenswrapper[4721]: W0130 21:21:45.673209 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b87678b_4009_4707_bd9f_b3d18f08d90b.slice/crio-51c4dc15fb4eddfad6e241b998e966d1b2e88966182797ec8afaf1e62fcf7353 WatchSource:0}: Error finding container 51c4dc15fb4eddfad6e241b998e966d1b2e88966182797ec8afaf1e62fcf7353: Status 404 returned error can't find the container with id 51c4dc15fb4eddfad6e241b998e966d1b2e88966182797ec8afaf1e62fcf7353 Jan 30 21:21:45 crc kubenswrapper[4721]: I0130 21:21:45.815888 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q"] Jan 30 21:21:45 crc kubenswrapper[4721]: W0130 21:21:45.824520 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b03af70_b38b_4b01_b80a_2b0f39c63906.slice/crio-de06a00135425981da3eaace8134e7973be9fc2bf85cdc43b9f911b5fccc729f WatchSource:0}: Error finding container de06a00135425981da3eaace8134e7973be9fc2bf85cdc43b9f911b5fccc729f: Status 404 returned error can't find the container with id de06a00135425981da3eaace8134e7973be9fc2bf85cdc43b9f911b5fccc729f Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.100533 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398" path="/var/lib/kubelet/pods/7b3e7a2e-35b4-44ea-9fd7-6ce7997e7398/volumes" Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.102006 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a02c0348-158b-4ef6-b3ed-cea47a4738ca" path="/var/lib/kubelet/pods/a02c0348-158b-4ef6-b3ed-cea47a4738ca/volumes" Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.658800 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" event={"ID":"1b87678b-4009-4707-bd9f-b3d18f08d90b","Type":"ContainerStarted","Data":"55ff6960cac721026706fd72cbbf6004c782ddc767d4fd33930995d1351e90fa"} Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.658856 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" event={"ID":"1b87678b-4009-4707-bd9f-b3d18f08d90b","Type":"ContainerStarted","Data":"51c4dc15fb4eddfad6e241b998e966d1b2e88966182797ec8afaf1e62fcf7353"} Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.659037 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.660804 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" event={"ID":"1b03af70-b38b-4b01-b80a-2b0f39c63906","Type":"ContainerStarted","Data":"db266591b36909cc6025bae799de5d53008535bec335877df5e50f9fdb80ec63"} Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.660832 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" 
event={"ID":"1b03af70-b38b-4b01-b80a-2b0f39c63906","Type":"ContainerStarted","Data":"de06a00135425981da3eaace8134e7973be9fc2bf85cdc43b9f911b5fccc729f"} Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.661500 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.664415 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.672248 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:46 crc kubenswrapper[4721]: I0130 21:21:46.678087 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" podStartSLOduration=3.678075187 podStartE2EDuration="3.678075187s" podCreationTimestamp="2026-01-30 21:21:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:46.676776586 +0000 UTC m=+295.468677822" watchObservedRunningTime="2026-01-30 21:21:46.678075187 +0000 UTC m=+295.469976433" Jan 30 21:21:47 crc kubenswrapper[4721]: I0130 21:21:47.522030 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 30 21:21:49 crc kubenswrapper[4721]: I0130 21:21:49.831198 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 30 21:21:50 crc kubenswrapper[4721]: I0130 21:21:50.688490 4721 generic.go:334] "Generic (PLEG): container finished" podID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerID="9ffcd0dc728edff83277dc59541f964354d90506eab04b9aa28c29eaa992426b" exitCode=0 Jan 30 21:21:50 crc kubenswrapper[4721]: I0130 21:21:50.688528 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" event={"ID":"10335cae-c54e-4bf1-b41c-6df530ac47dc","Type":"ContainerDied","Data":"9ffcd0dc728edff83277dc59541f964354d90506eab04b9aa28c29eaa992426b"} Jan 30 21:21:50 crc kubenswrapper[4721]: I0130 21:21:50.689061 4721 scope.go:117] "RemoveContainer" containerID="9ffcd0dc728edff83277dc59541f964354d90506eab04b9aa28c29eaa992426b" Jan 30 21:21:50 crc kubenswrapper[4721]: I0130 21:21:50.710084 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" podStartSLOduration=7.710069605 podStartE2EDuration="7.710069605s" podCreationTimestamp="2026-01-30 21:21:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:46.729628352 +0000 UTC m=+295.521529598" watchObservedRunningTime="2026-01-30 21:21:50.710069605 +0000 UTC m=+299.501970851" Jan 30 21:21:51 crc kubenswrapper[4721]: I0130 21:21:51.697034 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" event={"ID":"10335cae-c54e-4bf1-b41c-6df530ac47dc","Type":"ContainerStarted","Data":"07b5fcb4f3070dcd172cf963310153e70380e4975d344383001dd6d28d5b580c"} Jan 30 21:21:51 crc kubenswrapper[4721]: I0130 21:21:51.698549 4721 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:21:51 crc kubenswrapper[4721]: I0130 21:21:51.701089 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:21:51 crc kubenswrapper[4721]: I0130 21:21:51.789965 4721 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 30 21:21:55 crc kubenswrapper[4721]: I0130 21:21:55.617169 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 30 21:21:55 crc kubenswrapper[4721]: I0130 21:21:55.717958 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.374369 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-855fx"] Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.375140 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-855fx" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="registry-server" containerID="cri-o://123757439b6efa1e1508614c8f53c0d020d37bed52274a88202c211b6d05dada" gracePeriod=2 Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.421874 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.525110 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q"] Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.525571 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" podUID="1b03af70-b38b-4b01-b80a-2b0f39c63906" containerName="route-controller-manager" containerID="cri-o://db266591b36909cc6025bae799de5d53008535bec335877df5e50f9fdb80ec63" gracePeriod=30 Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.539998 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-99fz7"] Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.540231 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" podUID="1b87678b-4009-4707-bd9f-b3d18f08d90b" containerName="controller-manager" containerID="cri-o://55ff6960cac721026706fd72cbbf6004c782ddc767d4fd33930995d1351e90fa" gracePeriod=30 Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.744967 4721 generic.go:334] "Generic (PLEG): container finished" podID="1b87678b-4009-4707-bd9f-b3d18f08d90b" containerID="55ff6960cac721026706fd72cbbf6004c782ddc767d4fd33930995d1351e90fa" exitCode=0 Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.745040 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" event={"ID":"1b87678b-4009-4707-bd9f-b3d18f08d90b","Type":"ContainerDied","Data":"55ff6960cac721026706fd72cbbf6004c782ddc767d4fd33930995d1351e90fa"} Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.749279 4721 generic.go:334] "Generic (PLEG): container finished" podID="1b03af70-b38b-4b01-b80a-2b0f39c63906" 
containerID="db266591b36909cc6025bae799de5d53008535bec335877df5e50f9fdb80ec63" exitCode=0 Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.749379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" event={"ID":"1b03af70-b38b-4b01-b80a-2b0f39c63906","Type":"ContainerDied","Data":"db266591b36909cc6025bae799de5d53008535bec335877df5e50f9fdb80ec63"} Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.761873 4721 generic.go:334] "Generic (PLEG): container finished" podID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerID="123757439b6efa1e1508614c8f53c0d020d37bed52274a88202c211b6d05dada" exitCode=0 Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.761931 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-855fx" event={"ID":"60ea230f-98d6-4955-bf6a-71a91d65ff20","Type":"ContainerDied","Data":"123757439b6efa1e1508614c8f53c0d020d37bed52274a88202c211b6d05dada"} Jan 30 21:21:56 crc kubenswrapper[4721]: I0130 21:21:56.897021 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-855fx" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.028366 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-utilities\") pod \"60ea230f-98d6-4955-bf6a-71a91d65ff20\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.028497 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-catalog-content\") pod \"60ea230f-98d6-4955-bf6a-71a91d65ff20\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.028625 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jw2fp\" (UniqueName: \"kubernetes.io/projected/60ea230f-98d6-4955-bf6a-71a91d65ff20-kube-api-access-jw2fp\") pod \"60ea230f-98d6-4955-bf6a-71a91d65ff20\" (UID: \"60ea230f-98d6-4955-bf6a-71a91d65ff20\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.029452 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-utilities" (OuterVolumeSpecName: "utilities") pod "60ea230f-98d6-4955-bf6a-71a91d65ff20" (UID: "60ea230f-98d6-4955-bf6a-71a91d65ff20"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.051970 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ea230f-98d6-4955-bf6a-71a91d65ff20-kube-api-access-jw2fp" (OuterVolumeSpecName: "kube-api-access-jw2fp") pod "60ea230f-98d6-4955-bf6a-71a91d65ff20" (UID: "60ea230f-98d6-4955-bf6a-71a91d65ff20"). InnerVolumeSpecName "kube-api-access-jw2fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.079435 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60ea230f-98d6-4955-bf6a-71a91d65ff20" (UID: "60ea230f-98d6-4955-bf6a-71a91d65ff20"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.130478 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jw2fp\" (UniqueName: \"kubernetes.io/projected/60ea230f-98d6-4955-bf6a-71a91d65ff20-kube-api-access-jw2fp\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.130559 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.130582 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ea230f-98d6-4955-bf6a-71a91d65ff20-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.139001 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.231363 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-config\") pod \"1b03af70-b38b-4b01-b80a-2b0f39c63906\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.231460 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vxkx\" (UniqueName: \"kubernetes.io/projected/1b03af70-b38b-4b01-b80a-2b0f39c63906-kube-api-access-8vxkx\") pod \"1b03af70-b38b-4b01-b80a-2b0f39c63906\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.231565 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b03af70-b38b-4b01-b80a-2b0f39c63906-serving-cert\") pod \"1b03af70-b38b-4b01-b80a-2b0f39c63906\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.231590 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-client-ca\") pod \"1b03af70-b38b-4b01-b80a-2b0f39c63906\" (UID: \"1b03af70-b38b-4b01-b80a-2b0f39c63906\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.232268 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-client-ca" (OuterVolumeSpecName: "client-ca") pod "1b03af70-b38b-4b01-b80a-2b0f39c63906" (UID: "1b03af70-b38b-4b01-b80a-2b0f39c63906"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.232383 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-config" (OuterVolumeSpecName: "config") pod "1b03af70-b38b-4b01-b80a-2b0f39c63906" (UID: "1b03af70-b38b-4b01-b80a-2b0f39c63906"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.235205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b03af70-b38b-4b01-b80a-2b0f39c63906-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1b03af70-b38b-4b01-b80a-2b0f39c63906" (UID: "1b03af70-b38b-4b01-b80a-2b0f39c63906"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.235465 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b03af70-b38b-4b01-b80a-2b0f39c63906-kube-api-access-8vxkx" (OuterVolumeSpecName: "kube-api-access-8vxkx") pod "1b03af70-b38b-4b01-b80a-2b0f39c63906" (UID: "1b03af70-b38b-4b01-b80a-2b0f39c63906"). InnerVolumeSpecName "kube-api-access-8vxkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.260040 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.333529 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b03af70-b38b-4b01-b80a-2b0f39c63906-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.333570 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.333581 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b03af70-b38b-4b01-b80a-2b0f39c63906-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.333592 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vxkx\" (UniqueName: \"kubernetes.io/projected/1b03af70-b38b-4b01-b80a-2b0f39c63906-kube-api-access-8vxkx\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.435008 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b87678b-4009-4707-bd9f-b3d18f08d90b-serving-cert\") pod \"1b87678b-4009-4707-bd9f-b3d18f08d90b\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.435401 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-client-ca\") pod \"1b87678b-4009-4707-bd9f-b3d18f08d90b\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.435456 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-config\") pod \"1b87678b-4009-4707-bd9f-b3d18f08d90b\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.435545 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmcqp\" (UniqueName: \"kubernetes.io/projected/1b87678b-4009-4707-bd9f-b3d18f08d90b-kube-api-access-qmcqp\") pod 
\"1b87678b-4009-4707-bd9f-b3d18f08d90b\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.435606 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-proxy-ca-bundles\") pod \"1b87678b-4009-4707-bd9f-b3d18f08d90b\" (UID: \"1b87678b-4009-4707-bd9f-b3d18f08d90b\") " Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.436293 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-config" (OuterVolumeSpecName: "config") pod "1b87678b-4009-4707-bd9f-b3d18f08d90b" (UID: "1b87678b-4009-4707-bd9f-b3d18f08d90b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.436338 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-client-ca" (OuterVolumeSpecName: "client-ca") pod "1b87678b-4009-4707-bd9f-b3d18f08d90b" (UID: "1b87678b-4009-4707-bd9f-b3d18f08d90b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.436534 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "1b87678b-4009-4707-bd9f-b3d18f08d90b" (UID: "1b87678b-4009-4707-bd9f-b3d18f08d90b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.438602 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b87678b-4009-4707-bd9f-b3d18f08d90b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1b87678b-4009-4707-bd9f-b3d18f08d90b" (UID: "1b87678b-4009-4707-bd9f-b3d18f08d90b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.439119 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b87678b-4009-4707-bd9f-b3d18f08d90b-kube-api-access-qmcqp" (OuterVolumeSpecName: "kube-api-access-qmcqp") pod "1b87678b-4009-4707-bd9f-b3d18f08d90b" (UID: "1b87678b-4009-4707-bd9f-b3d18f08d90b"). InnerVolumeSpecName "kube-api-access-qmcqp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.537943 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.537991 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.538003 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmcqp\" (UniqueName: \"kubernetes.io/projected/1b87678b-4009-4707-bd9f-b3d18f08d90b-kube-api-access-qmcqp\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.538017 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1b87678b-4009-4707-bd9f-b3d18f08d90b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.538029 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b87678b-4009-4707-bd9f-b3d18f08d90b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.788939 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.790534 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-855fx" event={"ID":"60ea230f-98d6-4955-bf6a-71a91d65ff20","Type":"ContainerDied","Data":"c02eb06e3ee5961533503c5c8457ae1451088b5e2619d0adf44118db841b4234"} Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.790641 4721 scope.go:117] "RemoveContainer" containerID="123757439b6efa1e1508614c8f53c0d020d37bed52274a88202c211b6d05dada" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.790689 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-855fx" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.794392 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.794477 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cd7b5985-99fz7" event={"ID":"1b87678b-4009-4707-bd9f-b3d18f08d90b","Type":"ContainerDied","Data":"51c4dc15fb4eddfad6e241b998e966d1b2e88966182797ec8afaf1e62fcf7353"} Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.798895 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" event={"ID":"1b03af70-b38b-4b01-b80a-2b0f39c63906","Type":"ContainerDied","Data":"de06a00135425981da3eaace8134e7973be9fc2bf85cdc43b9f911b5fccc729f"} Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.799036 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.814867 4721 scope.go:117] "RemoveContainer" containerID="cf357cfb88d2e54bc8d0c47a266a5433480088d4912a8613985b306f90aeee27" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.836239 4721 scope.go:117] "RemoveContainer" containerID="fe4bda470704685f72b9286fb9225436d3a2f2b148aefde4a55e815586df118d" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.838369 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-855fx"] Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.853651 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-855fx"] Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.865897 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-99fz7"] Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.868726 4721 scope.go:117] "RemoveContainer" containerID="55ff6960cac721026706fd72cbbf6004c782ddc767d4fd33930995d1351e90fa" Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.875907 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-99fz7"] Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.880687 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q"] Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.883528 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9m22q"] Jan 30 21:21:57 crc kubenswrapper[4721]: I0130 21:21:57.883646 4721 scope.go:117] "RemoveContainer" containerID="db266591b36909cc6025bae799de5d53008535bec335877df5e50f9fdb80ec63" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087166 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"] Jan 30 21:21:58 crc kubenswrapper[4721]: E0130 21:21:58.087579 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="extract-utilities" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087596 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="extract-utilities" Jan 30 21:21:58 crc kubenswrapper[4721]: E0130 21:21:58.087628 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="extract-content" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087638 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="extract-content" Jan 30 21:21:58 crc kubenswrapper[4721]: E0130 21:21:58.087653 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="registry-server" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087663 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="registry-server" Jan 30 21:21:58 crc kubenswrapper[4721]: E0130 21:21:58.087689 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b03af70-b38b-4b01-b80a-2b0f39c63906" containerName="route-controller-manager" Jan 30 
21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087702 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b03af70-b38b-4b01-b80a-2b0f39c63906" containerName="route-controller-manager" Jan 30 21:21:58 crc kubenswrapper[4721]: E0130 21:21:58.087718 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b87678b-4009-4707-bd9f-b3d18f08d90b" containerName="controller-manager" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087725 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b87678b-4009-4707-bd9f-b3d18f08d90b" containerName="controller-manager" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087876 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" containerName="registry-server" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087899 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b03af70-b38b-4b01-b80a-2b0f39c63906" containerName="route-controller-manager" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.087915 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b87678b-4009-4707-bd9f-b3d18f08d90b" containerName="controller-manager" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.088771 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.093902 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.094066 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.094122 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.094116 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.094447 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.105133 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.107240 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b03af70-b38b-4b01-b80a-2b0f39c63906" path="/var/lib/kubelet/pods/1b03af70-b38b-4b01-b80a-2b0f39c63906/volumes" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.108545 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b87678b-4009-4707-bd9f-b3d18f08d90b" path="/var/lib/kubelet/pods/1b87678b-4009-4707-bd9f-b3d18f08d90b/volumes" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.109198 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ea230f-98d6-4955-bf6a-71a91d65ff20" path="/var/lib/kubelet/pods/60ea230f-98d6-4955-bf6a-71a91d65ff20/volumes" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.110639 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk"] Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.111635 4721 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.113743 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"] Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.116834 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.117025 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.117560 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.117651 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.117571 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.120633 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk"] Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.121903 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.133878 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.249630 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-proxy-ca-bundles\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.249733 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkmqw\" (UniqueName: \"kubernetes.io/projected/ddc93d65-9db1-42d3-8931-742c917d682d-kube-api-access-jkmqw\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.249770 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc93d65-9db1-42d3-8931-742c917d682d-serving-cert\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.249808 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-client-ca\") pod 
\"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.249999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-config\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.250253 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a932937-131d-456b-8805-8ba3121a70e8-serving-cert\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.250377 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-client-ca\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.250455 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74ftm\" (UniqueName: \"kubernetes.io/projected/8a932937-131d-456b-8805-8ba3121a70e8-kube-api-access-74ftm\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.250490 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-config\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.351789 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-proxy-ca-bundles\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352206 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkmqw\" (UniqueName: \"kubernetes.io/projected/ddc93d65-9db1-42d3-8931-742c917d682d-kube-api-access-jkmqw\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352328 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc93d65-9db1-42d3-8931-742c917d682d-serving-cert\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: 
\"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-config\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352483 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-client-ca\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352613 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a932937-131d-456b-8805-8ba3121a70e8-serving-cert\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352716 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-client-ca\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352809 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74ftm\" (UniqueName: \"kubernetes.io/projected/8a932937-131d-456b-8805-8ba3121a70e8-kube-api-access-74ftm\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.352890 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-config\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.353202 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-proxy-ca-bundles\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.354372 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-config\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.354503 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-client-ca\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.354602 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-client-ca\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.355214 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-config\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.359422 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a932937-131d-456b-8805-8ba3121a70e8-serving-cert\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.362086 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc93d65-9db1-42d3-8931-742c917d682d-serving-cert\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.376367 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74ftm\" (UniqueName: \"kubernetes.io/projected/8a932937-131d-456b-8805-8ba3121a70e8-kube-api-access-74ftm\") pod \"controller-manager-5bd976b8c9-c9t88\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") " pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.379056 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkmqw\" (UniqueName: \"kubernetes.io/projected/ddc93d65-9db1-42d3-8931-742c917d682d-kube-api-access-jkmqw\") pod \"route-controller-manager-7748667445-9sbhk\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.448124 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.463382 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.797124 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.864190 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"] Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.950319 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk"] Jan 30 21:21:58 crc kubenswrapper[4721]: W0130 21:21:58.959086 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddc93d65_9db1_42d3_8931_742c917d682d.slice/crio-950a0aa326105ae0963f5df3ba95ff4f66b0e722aac15508555e6e4481061c9e WatchSource:0}: Error finding container 950a0aa326105ae0963f5df3ba95ff4f66b0e722aac15508555e6e4481061c9e: Status 404 returned error can't find the container with id 950a0aa326105ae0963f5df3ba95ff4f66b0e722aac15508555e6e4481061c9e Jan 30 21:21:58 crc kubenswrapper[4721]: I0130 21:21:58.987048 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wrlqs"] Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.002815 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wrlqs" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="registry-server" containerID="cri-o://890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0" gracePeriod=2 Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.382842 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.470963 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gj5cj\" (UniqueName: \"kubernetes.io/projected/ecc44241-f566-4099-b6cf-adf0420a14f1-kube-api-access-gj5cj\") pod \"ecc44241-f566-4099-b6cf-adf0420a14f1\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.471505 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-utilities\") pod \"ecc44241-f566-4099-b6cf-adf0420a14f1\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.471656 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-catalog-content\") pod \"ecc44241-f566-4099-b6cf-adf0420a14f1\" (UID: \"ecc44241-f566-4099-b6cf-adf0420a14f1\") " Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.472978 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-utilities" (OuterVolumeSpecName: "utilities") pod "ecc44241-f566-4099-b6cf-adf0420a14f1" (UID: "ecc44241-f566-4099-b6cf-adf0420a14f1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.477403 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc44241-f566-4099-b6cf-adf0420a14f1-kube-api-access-gj5cj" (OuterVolumeSpecName: "kube-api-access-gj5cj") pod "ecc44241-f566-4099-b6cf-adf0420a14f1" (UID: "ecc44241-f566-4099-b6cf-adf0420a14f1"). InnerVolumeSpecName "kube-api-access-gj5cj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.543639 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecc44241-f566-4099-b6cf-adf0420a14f1" (UID: "ecc44241-f566-4099-b6cf-adf0420a14f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.573208 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gj5cj\" (UniqueName: \"kubernetes.io/projected/ecc44241-f566-4099-b6cf-adf0420a14f1-kube-api-access-gj5cj\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.573258 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.573269 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc44241-f566-4099-b6cf-adf0420a14f1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.816039 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" event={"ID":"ddc93d65-9db1-42d3-8931-742c917d682d","Type":"ContainerStarted","Data":"3dcacef0f55801024503abbba5db5fe1d823ee8430a533870e0f5264cfbda81a"} Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.816092 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" event={"ID":"ddc93d65-9db1-42d3-8931-742c917d682d","Type":"ContainerStarted","Data":"950a0aa326105ae0963f5df3ba95ff4f66b0e722aac15508555e6e4481061c9e"} Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.817576 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.818913 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" event={"ID":"8a932937-131d-456b-8805-8ba3121a70e8","Type":"ContainerStarted","Data":"9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a"} Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.818944 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" event={"ID":"8a932937-131d-456b-8805-8ba3121a70e8","Type":"ContainerStarted","Data":"68a422c5ed0be63645c49597364abc3f39656eb73a4a97c3d527d5a86c18ef23"} Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.819739 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.823031 4721 generic.go:334] "Generic (PLEG): container finished" podID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerID="890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0" exitCode=0 Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.823080 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerDied","Data":"890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0"} Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.823109 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wrlqs" event={"ID":"ecc44241-f566-4099-b6cf-adf0420a14f1","Type":"ContainerDied","Data":"168a3ae603142cbb35c574b84355569e0c493e1421862a3c80bdde71d57494c3"} Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.823121 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wrlqs" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.823617 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.823128 4721 scope.go:117] "RemoveContainer" containerID="890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.826211 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.838699 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" podStartSLOduration=3.83868328 podStartE2EDuration="3.83868328s" podCreationTimestamp="2026-01-30 21:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:59.835215209 +0000 UTC m=+308.627116455" watchObservedRunningTime="2026-01-30 21:21:59.83868328 +0000 UTC m=+308.630584526" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.848677 4721 scope.go:117] "RemoveContainer" containerID="be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.881230 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" podStartSLOduration=3.8811978849999997 podStartE2EDuration="3.881197885s" podCreationTimestamp="2026-01-30 21:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:21:59.879282274 +0000 UTC m=+308.671183530" watchObservedRunningTime="2026-01-30 21:21:59.881197885 +0000 UTC m=+308.673099131" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.889579 4721 scope.go:117] "RemoveContainer" containerID="2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.907405 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wrlqs"] Jan 30 21:21:59 crc kubenswrapper[4721]: 
I0130 21:21:59.910483 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wrlqs"] Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.916312 4721 scope.go:117] "RemoveContainer" containerID="890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0" Jan 30 21:21:59 crc kubenswrapper[4721]: E0130 21:21:59.917023 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0\": container with ID starting with 890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0 not found: ID does not exist" containerID="890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.917102 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0"} err="failed to get container status \"890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0\": rpc error: code = NotFound desc = could not find container \"890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0\": container with ID starting with 890ce6b55fabc3d208616119f925fc3de69f79474a0c004c64888105f104a5f0 not found: ID does not exist" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.917142 4721 scope.go:117] "RemoveContainer" containerID="be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19" Jan 30 21:21:59 crc kubenswrapper[4721]: E0130 21:21:59.917693 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19\": container with ID starting with be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19 not found: ID does not exist" containerID="be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.917761 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19"} err="failed to get container status \"be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19\": rpc error: code = NotFound desc = could not find container \"be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19\": container with ID starting with be566c1d66f4e8a938131c0d8e069cf0f7154d8cb37cddbc1eb8cba0969ebe19 not found: ID does not exist" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.917803 4721 scope.go:117] "RemoveContainer" containerID="2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e" Jan 30 21:21:59 crc kubenswrapper[4721]: E0130 21:21:59.919183 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e\": container with ID starting with 2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e not found: ID does not exist" containerID="2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e" Jan 30 21:21:59 crc kubenswrapper[4721]: I0130 21:21:59.919236 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e"} err="failed to get container status 
\"2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e\": rpc error: code = NotFound desc = could not find container \"2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e\": container with ID starting with 2fd9926083d68910870ff38cf89ae030697a587cfe55d02f75c5117f6ac25e1e not found: ID does not exist" Jan 30 21:22:00 crc kubenswrapper[4721]: I0130 21:22:00.103419 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" path="/var/lib/kubelet/pods/ecc44241-f566-4099-b6cf-adf0420a14f1/volumes" Jan 30 21:22:00 crc kubenswrapper[4721]: I0130 21:22:00.374613 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 30 21:22:00 crc kubenswrapper[4721]: I0130 21:22:00.934069 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 30 21:22:03 crc kubenswrapper[4721]: I0130 21:22:03.346269 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 30 21:22:04 crc kubenswrapper[4721]: I0130 21:22:04.463180 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 30 21:22:08 crc kubenswrapper[4721]: I0130 21:22:08.578898 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 30 21:22:09 crc kubenswrapper[4721]: I0130 21:22:09.311678 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 30 21:22:10 crc kubenswrapper[4721]: I0130 21:22:10.983755 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 30 21:22:14 crc kubenswrapper[4721]: I0130 21:22:14.662372 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 30 21:22:15 crc kubenswrapper[4721]: I0130 21:22:15.055500 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 30 21:22:16 crc kubenswrapper[4721]: I0130 21:22:16.532385 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk"] Jan 30 21:22:16 crc kubenswrapper[4721]: I0130 21:22:16.533611 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" podUID="ddc93d65-9db1-42d3-8931-742c917d682d" containerName="route-controller-manager" containerID="cri-o://3dcacef0f55801024503abbba5db5fe1d823ee8430a533870e0f5264cfbda81a" gracePeriod=30 Jan 30 21:22:16 crc kubenswrapper[4721]: I0130 21:22:16.957107 4721 generic.go:334] "Generic (PLEG): container finished" podID="ddc93d65-9db1-42d3-8931-742c917d682d" containerID="3dcacef0f55801024503abbba5db5fe1d823ee8430a533870e0f5264cfbda81a" exitCode=0 Jan 30 21:22:16 crc kubenswrapper[4721]: I0130 21:22:16.957226 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" event={"ID":"ddc93d65-9db1-42d3-8931-742c917d682d","Type":"ContainerDied","Data":"3dcacef0f55801024503abbba5db5fe1d823ee8430a533870e0f5264cfbda81a"} Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.115640 4721 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.279465 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-client-ca\") pod \"ddc93d65-9db1-42d3-8931-742c917d682d\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.279602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc93d65-9db1-42d3-8931-742c917d682d-serving-cert\") pod \"ddc93d65-9db1-42d3-8931-742c917d682d\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.279699 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-config\") pod \"ddc93d65-9db1-42d3-8931-742c917d682d\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.279794 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkmqw\" (UniqueName: \"kubernetes.io/projected/ddc93d65-9db1-42d3-8931-742c917d682d-kube-api-access-jkmqw\") pod \"ddc93d65-9db1-42d3-8931-742c917d682d\" (UID: \"ddc93d65-9db1-42d3-8931-742c917d682d\") " Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.280961 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-config" (OuterVolumeSpecName: "config") pod "ddc93d65-9db1-42d3-8931-742c917d682d" (UID: "ddc93d65-9db1-42d3-8931-742c917d682d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.281000 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-client-ca" (OuterVolumeSpecName: "client-ca") pod "ddc93d65-9db1-42d3-8931-742c917d682d" (UID: "ddc93d65-9db1-42d3-8931-742c917d682d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.290266 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddc93d65-9db1-42d3-8931-742c917d682d-kube-api-access-jkmqw" (OuterVolumeSpecName: "kube-api-access-jkmqw") pod "ddc93d65-9db1-42d3-8931-742c917d682d" (UID: "ddc93d65-9db1-42d3-8931-742c917d682d"). InnerVolumeSpecName "kube-api-access-jkmqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.290859 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddc93d65-9db1-42d3-8931-742c917d682d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ddc93d65-9db1-42d3-8931-742c917d682d" (UID: "ddc93d65-9db1-42d3-8931-742c917d682d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.381311 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.381351 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkmqw\" (UniqueName: \"kubernetes.io/projected/ddc93d65-9db1-42d3-8931-742c917d682d-kube-api-access-jkmqw\") on node \"crc\" DevicePath \"\"" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.381364 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ddc93d65-9db1-42d3-8931-742c917d682d-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.381376 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc93d65-9db1-42d3-8931-742c917d682d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.969394 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" event={"ID":"ddc93d65-9db1-42d3-8931-742c917d682d","Type":"ContainerDied","Data":"950a0aa326105ae0963f5df3ba95ff4f66b0e722aac15508555e6e4481061c9e"} Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.969496 4721 scope.go:117] "RemoveContainer" containerID="3dcacef0f55801024503abbba5db5fe1d823ee8430a533870e0f5264cfbda81a" Jan 30 21:22:17 crc kubenswrapper[4721]: I0130 21:22:17.969709 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk" Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.018700 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk"] Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.025457 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7748667445-9sbhk"] Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.106666 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddc93d65-9db1-42d3-8931-742c917d682d" path="/var/lib/kubelet/pods/ddc93d65-9db1-42d3-8931-742c917d682d/volumes" Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.107491 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"] Jan 30 21:22:18 crc kubenswrapper[4721]: E0130 21:22:18.107835 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="registry-server" Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.107866 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="registry-server" Jan 30 21:22:18 crc kubenswrapper[4721]: E0130 21:22:18.107910 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="extract-utilities" Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.107927 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="extract-utilities" Jan 30 21:22:18 crc kubenswrapper[4721]: 
Jan 30 21:22:18 crc kubenswrapper[4721]: E0130 21:22:18.107950 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="extract-content"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.107965 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="extract-content"
Jan 30 21:22:18 crc kubenswrapper[4721]: E0130 21:22:18.107983 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddc93d65-9db1-42d3-8931-742c917d682d" containerName="route-controller-manager"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.107997 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddc93d65-9db1-42d3-8931-742c917d682d" containerName="route-controller-manager"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.108232 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc44241-f566-4099-b6cf-adf0420a14f1" containerName="registry-server"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.108261 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddc93d65-9db1-42d3-8931-742c917d682d" containerName="route-controller-manager"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.109059 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.115725 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.115979 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.116281 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.116370 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.116654 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.117129 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.123870 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"]
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.195418 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbxzb\" (UniqueName: \"kubernetes.io/projected/a90f336c-7148-4606-bdf4-368e361f4ca3-kube-api-access-mbxzb\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.195546 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a90f336c-7148-4606-bdf4-368e361f4ca3-client-ca\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.195604 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a90f336c-7148-4606-bdf4-368e361f4ca3-serving-cert\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.195816 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a90f336c-7148-4606-bdf4-368e361f4ca3-config\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.297225 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a90f336c-7148-4606-bdf4-368e361f4ca3-client-ca\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.297428 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a90f336c-7148-4606-bdf4-368e361f4ca3-serving-cert\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.297519 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a90f336c-7148-4606-bdf4-368e361f4ca3-config\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.297579 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbxzb\" (UniqueName: \"kubernetes.io/projected/a90f336c-7148-4606-bdf4-368e361f4ca3-kube-api-access-mbxzb\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.299342 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a90f336c-7148-4606-bdf4-368e361f4ca3-config\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.300098 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a90f336c-7148-4606-bdf4-368e361f4ca3-client-ca\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.307755 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a90f336c-7148-4606-bdf4-368e361f4ca3-serving-cert\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.328674 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbxzb\" (UniqueName: \"kubernetes.io/projected/a90f336c-7148-4606-bdf4-368e361f4ca3-kube-api-access-mbxzb\") pod \"route-controller-manager-5b7c4ffcb5-9w4jf\" (UID: \"a90f336c-7148-4606-bdf4-368e361f4ca3\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.497281 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:18 crc kubenswrapper[4721]: I0130 21:22:18.969181 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"]
Jan 30 21:22:18 crc kubenswrapper[4721]: W0130 21:22:18.975852 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda90f336c_7148_4606_bdf4_368e361f4ca3.slice/crio-babece9208841ff519aff296d4ce57d1a363e7123757eb50ff20954e65245547 WatchSource:0}: Error finding container babece9208841ff519aff296d4ce57d1a363e7123757eb50ff20954e65245547: Status 404 returned error can't find the container with id babece9208841ff519aff296d4ce57d1a363e7123757eb50ff20954e65245547
Jan 30 21:22:19 crc kubenswrapper[4721]: I0130 21:22:19.987022 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf" event={"ID":"a90f336c-7148-4606-bdf4-368e361f4ca3","Type":"ContainerStarted","Data":"3018fd6fe086d4836e01b4c00b2bfeca48feb014a32659f416e4658910ebd9fc"}
Jan 30 21:22:19 crc kubenswrapper[4721]: I0130 21:22:19.987072 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf" event={"ID":"a90f336c-7148-4606-bdf4-368e361f4ca3","Type":"ContainerStarted","Data":"babece9208841ff519aff296d4ce57d1a363e7123757eb50ff20954e65245547"}
Jan 30 21:22:19 crc kubenswrapper[4721]: I0130 21:22:19.988618 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:19 crc kubenswrapper[4721]: I0130 21:22:19.992848 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf"
Jan 30 21:22:20 crc kubenswrapper[4721]: I0130 21:22:20.009843 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b7c4ffcb5-9w4jf" podStartSLOduration=4.009806519 podStartE2EDuration="4.009806519s" podCreationTimestamp="2026-01-30 21:22:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:22:20.008779357 +0000 UTC m=+328.800680613" watchObservedRunningTime="2026-01-30 21:22:20.009806519 +0000 UTC m=+328.801707775"
Jan 30 21:22:36 crc kubenswrapper[4721]: I0130 21:22:36.517736 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"]
Jan 30 21:22:36 crc kubenswrapper[4721]: I0130 21:22:36.518798 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" podUID="8a932937-131d-456b-8805-8ba3121a70e8" containerName="controller-manager" containerID="cri-o://9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a" gracePeriod=30
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.050576 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.111418 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-config\") pod \"8a932937-131d-456b-8805-8ba3121a70e8\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") "
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.111495 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74ftm\" (UniqueName: \"kubernetes.io/projected/8a932937-131d-456b-8805-8ba3121a70e8-kube-api-access-74ftm\") pod \"8a932937-131d-456b-8805-8ba3121a70e8\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") "
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.111557 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-proxy-ca-bundles\") pod \"8a932937-131d-456b-8805-8ba3121a70e8\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") "
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.111660 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a932937-131d-456b-8805-8ba3121a70e8-serving-cert\") pod \"8a932937-131d-456b-8805-8ba3121a70e8\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") "
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.111779 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-client-ca\") pod \"8a932937-131d-456b-8805-8ba3121a70e8\" (UID: \"8a932937-131d-456b-8805-8ba3121a70e8\") "
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.112906 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-client-ca" (OuterVolumeSpecName: "client-ca") pod "8a932937-131d-456b-8805-8ba3121a70e8" (UID: "8a932937-131d-456b-8805-8ba3121a70e8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.112930 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8a932937-131d-456b-8805-8ba3121a70e8" (UID: "8a932937-131d-456b-8805-8ba3121a70e8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.113391 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-config" (OuterVolumeSpecName: "config") pod "8a932937-131d-456b-8805-8ba3121a70e8" (UID: "8a932937-131d-456b-8805-8ba3121a70e8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.121774 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a932937-131d-456b-8805-8ba3121a70e8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8a932937-131d-456b-8805-8ba3121a70e8" (UID: "8a932937-131d-456b-8805-8ba3121a70e8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.123219 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a932937-131d-456b-8805-8ba3121a70e8-kube-api-access-74ftm" (OuterVolumeSpecName: "kube-api-access-74ftm") pod "8a932937-131d-456b-8805-8ba3121a70e8" (UID: "8a932937-131d-456b-8805-8ba3121a70e8"). InnerVolumeSpecName "kube-api-access-74ftm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.126500 4721 generic.go:334] "Generic (PLEG): container finished" podID="8a932937-131d-456b-8805-8ba3121a70e8" containerID="9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a" exitCode=0
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.126554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" event={"ID":"8a932937-131d-456b-8805-8ba3121a70e8","Type":"ContainerDied","Data":"9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a"}
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.126595 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88" event={"ID":"8a932937-131d-456b-8805-8ba3121a70e8","Type":"ContainerDied","Data":"68a422c5ed0be63645c49597364abc3f39656eb73a4a97c3d527d5a86c18ef23"}
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.126620 4721 scope.go:117] "RemoveContainer" containerID="9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a"
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.126855 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.176698 4721 scope.go:117] "RemoveContainer" containerID="9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a"
Jan 30 21:22:37 crc kubenswrapper[4721]: E0130 21:22:37.177332 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a\": container with ID starting with 9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a not found: ID does not exist" containerID="9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a"
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.177377 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a"} err="failed to get container status \"9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a\": rpc error: code = NotFound desc = could not find container \"9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a\": container with ID starting with 9f596ab51f2262c7aad9285fb278f91bc7e4e26ebd3ac5bf2b96063fa7f0376a not found: ID does not exist"
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.178160 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"]
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.181225 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5bd976b8c9-c9t88"]
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.213859 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a932937-131d-456b-8805-8ba3121a70e8-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.213939 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-client-ca\") on node \"crc\" DevicePath \"\""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.214017 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-config\") on node \"crc\" DevicePath \"\""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.214056 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74ftm\" (UniqueName: \"kubernetes.io/projected/8a932937-131d-456b-8805-8ba3121a70e8-kube-api-access-74ftm\") on node \"crc\" DevicePath \"\""
Jan 30 21:22:37 crc kubenswrapper[4721]: I0130 21:22:37.214076 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a932937-131d-456b-8805-8ba3121a70e8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.105855 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a932937-131d-456b-8805-8ba3121a70e8" path="/var/lib/kubelet/pods/8a932937-131d-456b-8805-8ba3121a70e8/volumes"
Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.114951 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-xk7jq"]
Jan 30 21:22:38 crc kubenswrapper[4721]: E0130 21:22:38.115454 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a932937-131d-456b-8805-8ba3121a70e8" containerName="controller-manager"
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a932937-131d-456b-8805-8ba3121a70e8" containerName="controller-manager" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.115499 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a932937-131d-456b-8805-8ba3121a70e8" containerName="controller-manager" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.115778 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a932937-131d-456b-8805-8ba3121a70e8" containerName="controller-manager" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.116544 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.119945 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.120817 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.121003 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.121960 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.122108 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.122137 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.137264 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.142087 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-xk7jq"] Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.232238 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvdk8\" (UniqueName: \"kubernetes.io/projected/cae9fe66-d29f-47b6-9ce8-d88325502bca-kube-api-access-vvdk8\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.232362 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-config\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.232410 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-client-ca\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 
21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.232535 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-proxy-ca-bundles\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.232571 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cae9fe66-d29f-47b6-9ce8-d88325502bca-serving-cert\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.333895 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-proxy-ca-bundles\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.333985 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cae9fe66-d29f-47b6-9ce8-d88325502bca-serving-cert\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.334132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvdk8\" (UniqueName: \"kubernetes.io/projected/cae9fe66-d29f-47b6-9ce8-d88325502bca-kube-api-access-vvdk8\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.334182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-config\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.334215 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-client-ca\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.336824 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-client-ca\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.337468 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-config\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.340771 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cae9fe66-d29f-47b6-9ce8-d88325502bca-proxy-ca-bundles\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.343788 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cae9fe66-d29f-47b6-9ce8-d88325502bca-serving-cert\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.370401 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvdk8\" (UniqueName: \"kubernetes.io/projected/cae9fe66-d29f-47b6-9ce8-d88325502bca-kube-api-access-vvdk8\") pod \"controller-manager-65cd7b5985-xk7jq\" (UID: \"cae9fe66-d29f-47b6-9ce8-d88325502bca\") " pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.450150 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:38 crc kubenswrapper[4721]: I0130 21:22:38.932705 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cd7b5985-xk7jq"] Jan 30 21:22:38 crc kubenswrapper[4721]: W0130 21:22:38.938399 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcae9fe66_d29f_47b6_9ce8_d88325502bca.slice/crio-6d7c201a0aeefc5c4ba06ab85f58d770e23ea2f0afe3467b513285d6d9de2ce0 WatchSource:0}: Error finding container 6d7c201a0aeefc5c4ba06ab85f58d770e23ea2f0afe3467b513285d6d9de2ce0: Status 404 returned error can't find the container with id 6d7c201a0aeefc5c4ba06ab85f58d770e23ea2f0afe3467b513285d6d9de2ce0 Jan 30 21:22:39 crc kubenswrapper[4721]: I0130 21:22:39.152134 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" event={"ID":"cae9fe66-d29f-47b6-9ce8-d88325502bca","Type":"ContainerStarted","Data":"ad4e9139c559ccfcb497b5314e35299563a186c58cdc9b9139af6f34e1a40ce8"} Jan 30 21:22:39 crc kubenswrapper[4721]: I0130 21:22:39.152868 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:39 crc kubenswrapper[4721]: I0130 21:22:39.152888 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" event={"ID":"cae9fe66-d29f-47b6-9ce8-d88325502bca","Type":"ContainerStarted","Data":"6d7c201a0aeefc5c4ba06ab85f58d770e23ea2f0afe3467b513285d6d9de2ce0"} Jan 30 21:22:39 crc kubenswrapper[4721]: I0130 21:22:39.154410 4721 patch_prober.go:28] interesting pod/controller-manager-65cd7b5985-xk7jq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure 
output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" start-of-body= Jan 30 21:22:39 crc kubenswrapper[4721]: I0130 21:22:39.154457 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" podUID="cae9fe66-d29f-47b6-9ce8-d88325502bca" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" Jan 30 21:22:39 crc kubenswrapper[4721]: I0130 21:22:39.183539 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" podStartSLOduration=3.183502135 podStartE2EDuration="3.183502135s" podCreationTimestamp="2026-01-30 21:22:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:22:39.176285773 +0000 UTC m=+347.968187039" watchObservedRunningTime="2026-01-30 21:22:39.183502135 +0000 UTC m=+347.975403401" Jan 30 21:22:40 crc kubenswrapper[4721]: I0130 21:22:40.168021 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-65cd7b5985-xk7jq" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.446334 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wwwvs"] Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.448433 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.466937 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wwwvs"] Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560256 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/57b8072a-5fbd-4c92-9011-40e53d6fc532-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560364 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/57b8072a-5fbd-4c92-9011-40e53d6fc532-registry-certificates\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560440 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp2w8\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-kube-api-access-sp2w8\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560489 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: 
\"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560529 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-bound-sa-token\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560671 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/57b8072a-5fbd-4c92-9011-40e53d6fc532-trusted-ca\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560712 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-registry-tls\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.560862 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/57b8072a-5fbd-4c92-9011-40e53d6fc532-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.598707 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.662870 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/57b8072a-5fbd-4c92-9011-40e53d6fc532-registry-certificates\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.663357 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp2w8\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-kube-api-access-sp2w8\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.663540 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-bound-sa-token\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc 
kubenswrapper[4721]: I0130 21:22:57.663750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/57b8072a-5fbd-4c92-9011-40e53d6fc532-trusted-ca\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.663948 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-registry-tls\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.664104 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/57b8072a-5fbd-4c92-9011-40e53d6fc532-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.664241 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/57b8072a-5fbd-4c92-9011-40e53d6fc532-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.664675 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/57b8072a-5fbd-4c92-9011-40e53d6fc532-registry-certificates\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.665245 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/57b8072a-5fbd-4c92-9011-40e53d6fc532-trusted-ca\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.665354 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/57b8072a-5fbd-4c92-9011-40e53d6fc532-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.671637 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-registry-tls\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.671679 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/57b8072a-5fbd-4c92-9011-40e53d6fc532-installation-pull-secrets\") pod 
\"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.684025 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-bound-sa-token\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.686569 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp2w8\" (UniqueName: \"kubernetes.io/projected/57b8072a-5fbd-4c92-9011-40e53d6fc532-kube-api-access-sp2w8\") pod \"image-registry-66df7c8f76-wwwvs\" (UID: \"57b8072a-5fbd-4c92-9011-40e53d6fc532\") " pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:57 crc kubenswrapper[4721]: I0130 21:22:57.796661 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:58 crc kubenswrapper[4721]: I0130 21:22:58.322855 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wwwvs"] Jan 30 21:22:58 crc kubenswrapper[4721]: W0130 21:22:58.330052 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57b8072a_5fbd_4c92_9011_40e53d6fc532.slice/crio-4620398d209e970f8e0ceef6ea13bae230f7d35d3d35f286a4960b6d73ba0afe WatchSource:0}: Error finding container 4620398d209e970f8e0ceef6ea13bae230f7d35d3d35f286a4960b6d73ba0afe: Status 404 returned error can't find the container with id 4620398d209e970f8e0ceef6ea13bae230f7d35d3d35f286a4960b6d73ba0afe Jan 30 21:22:59 crc kubenswrapper[4721]: I0130 21:22:59.307754 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" event={"ID":"57b8072a-5fbd-4c92-9011-40e53d6fc532","Type":"ContainerStarted","Data":"9fc70337110cbf321a69a0e4ab14f913332777e82354953dedaed9cdf0e97b91"} Jan 30 21:22:59 crc kubenswrapper[4721]: I0130 21:22:59.308210 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:22:59 crc kubenswrapper[4721]: I0130 21:22:59.308231 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" event={"ID":"57b8072a-5fbd-4c92-9011-40e53d6fc532","Type":"ContainerStarted","Data":"4620398d209e970f8e0ceef6ea13bae230f7d35d3d35f286a4960b6d73ba0afe"} Jan 30 21:22:59 crc kubenswrapper[4721]: I0130 21:22:59.341249 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" podStartSLOduration=2.341220094 podStartE2EDuration="2.341220094s" podCreationTimestamp="2026-01-30 21:22:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:22:59.339256311 +0000 UTC m=+368.131157637" watchObservedRunningTime="2026-01-30 21:22:59.341220094 +0000 UTC m=+368.133121340" Jan 30 21:22:59 crc kubenswrapper[4721]: I0130 21:22:59.448699 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness 
probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:22:59 crc kubenswrapper[4721]: I0130 21:22:59.448809 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:23:17 crc kubenswrapper[4721]: I0130 21:23:17.804493 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-wwwvs" Jan 30 21:23:17 crc kubenswrapper[4721]: I0130 21:23:17.895677 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cm92z"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.213382 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gsd7h"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.214462 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gsd7h" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="registry-server" containerID="cri-o://651a60a28f13d6c3891afca2f059e8bfbdb2c51794e41c9ade67dbaac35c9187" gracePeriod=30 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.223745 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mhm6"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.223844 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q9qsz"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.224228 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" containerID="cri-o://07b5fcb4f3070dcd172cf963310153e70380e4975d344383001dd6d28d5b580c" gracePeriod=30 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.224772 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9mhm6" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="registry-server" containerID="cri-o://e3550519af866aad0d1314812f525603aca8d79e5eae408c426efbaeaf299ee1" gracePeriod=30 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.232040 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bq7"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.232488 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w4bq7" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="registry-server" containerID="cri-o://e0baf73e55a57753bcb6a13be9e1a3321fc2e9b85cd7a003b00b00dbf7de67cf" gracePeriod=30 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.266082 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b48x7"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.266447 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b48x7" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="registry-server" 
containerID="cri-o://893a14f1e75ca387cfd9eefcd6a7c3253bd9c383b5f22dd831b3ba8d9af07f93" gracePeriod=30 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.272008 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szvpk"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.277900 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.279683 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szvpk"] Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.358204 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.358333 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.358623 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v97h8\" (UniqueName: \"kubernetes.io/projected/5099f2b0-69e9-481d-8cb7-c70144258515-kube-api-access-v97h8\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.461026 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.461095 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v97h8\" (UniqueName: \"kubernetes.io/projected/5099f2b0-69e9-481d-8cb7-c70144258515-kube-api-access-v97h8\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.461621 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.462898 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.469804 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.479594 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v97h8\" (UniqueName: \"kubernetes.io/projected/5099f2b0-69e9-481d-8cb7-c70144258515-kube-api-access-v97h8\") pod \"marketplace-operator-79b997595-szvpk\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.518643 4721 generic.go:334] "Generic (PLEG): container finished" podID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerID="07b5fcb4f3070dcd172cf963310153e70380e4975d344383001dd6d28d5b580c" exitCode=0 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.518784 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" event={"ID":"10335cae-c54e-4bf1-b41c-6df530ac47dc","Type":"ContainerDied","Data":"07b5fcb4f3070dcd172cf963310153e70380e4975d344383001dd6d28d5b580c"} Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.518849 4721 scope.go:117] "RemoveContainer" containerID="9ffcd0dc728edff83277dc59541f964354d90506eab04b9aa28c29eaa992426b" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.524637 4721 generic.go:334] "Generic (PLEG): container finished" podID="3179d84c-16bd-405b-ac42-38a710f7a713" containerID="e0baf73e55a57753bcb6a13be9e1a3321fc2e9b85cd7a003b00b00dbf7de67cf" exitCode=0 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.524692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bq7" event={"ID":"3179d84c-16bd-405b-ac42-38a710f7a713","Type":"ContainerDied","Data":"e0baf73e55a57753bcb6a13be9e1a3321fc2e9b85cd7a003b00b00dbf7de67cf"} Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.528965 4721 generic.go:334] "Generic (PLEG): container finished" podID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerID="651a60a28f13d6c3891afca2f059e8bfbdb2c51794e41c9ade67dbaac35c9187" exitCode=0 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.529004 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsd7h" event={"ID":"d263bfe7-d31b-445d-933f-2e1bc58a8e26","Type":"ContainerDied","Data":"651a60a28f13d6c3891afca2f059e8bfbdb2c51794e41c9ade67dbaac35c9187"} Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.538249 4721 generic.go:334] "Generic (PLEG): container finished" podID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerID="e3550519af866aad0d1314812f525603aca8d79e5eae408c426efbaeaf299ee1" exitCode=0 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.538334 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" 
event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerDied","Data":"e3550519af866aad0d1314812f525603aca8d79e5eae408c426efbaeaf299ee1"} Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.541784 4721 generic.go:334] "Generic (PLEG): container finished" podID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerID="893a14f1e75ca387cfd9eefcd6a7c3253bd9c383b5f22dd831b3ba8d9af07f93" exitCode=0 Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.541929 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerDied","Data":"893a14f1e75ca387cfd9eefcd6a7c3253bd9c383b5f22dd831b3ba8d9af07f93"} Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.673769 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.710588 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.767084 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-utilities\") pod \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.767197 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-catalog-content\") pod \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.767244 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwmh5\" (UniqueName: \"kubernetes.io/projected/d263bfe7-d31b-445d-933f-2e1bc58a8e26-kube-api-access-dwmh5\") pod \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\" (UID: \"d263bfe7-d31b-445d-933f-2e1bc58a8e26\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.769223 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-utilities" (OuterVolumeSpecName: "utilities") pod "d263bfe7-d31b-445d-933f-2e1bc58a8e26" (UID: "d263bfe7-d31b-445d-933f-2e1bc58a8e26"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.777715 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d263bfe7-d31b-445d-933f-2e1bc58a8e26-kube-api-access-dwmh5" (OuterVolumeSpecName: "kube-api-access-dwmh5") pod "d263bfe7-d31b-445d-933f-2e1bc58a8e26" (UID: "d263bfe7-d31b-445d-933f-2e1bc58a8e26"). InnerVolumeSpecName "kube-api-access-dwmh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.827724 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d263bfe7-d31b-445d-933f-2e1bc58a8e26" (UID: "d263bfe7-d31b-445d-933f-2e1bc58a8e26"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.848895 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.862643 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.868237 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.869124 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.869151 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d263bfe7-d31b-445d-933f-2e1bc58a8e26-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.869164 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwmh5\" (UniqueName: \"kubernetes.io/projected/d263bfe7-d31b-445d-933f-2e1bc58a8e26-kube-api-access-dwmh5\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.875108 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970399 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-operator-metrics\") pod \"10335cae-c54e-4bf1-b41c-6df530ac47dc\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970461 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-trusted-ca\") pod \"10335cae-c54e-4bf1-b41c-6df530ac47dc\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970526 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tctzl\" (UniqueName: \"kubernetes.io/projected/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-kube-api-access-tctzl\") pod \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970580 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-catalog-content\") pod \"315507aa-1e32-4360-b5b0-aa3625a10b0b\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970608 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljqxv\" (UniqueName: \"kubernetes.io/projected/10335cae-c54e-4bf1-b41c-6df530ac47dc-kube-api-access-ljqxv\") pod \"10335cae-c54e-4bf1-b41c-6df530ac47dc\" (UID: \"10335cae-c54e-4bf1-b41c-6df530ac47dc\") " Jan 30 21:23:18 crc kubenswrapper[4721]: 
I0130 21:23:18.970636 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-utilities\") pod \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970657 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-498hr\" (UniqueName: \"kubernetes.io/projected/3179d84c-16bd-405b-ac42-38a710f7a713-kube-api-access-498hr\") pod \"3179d84c-16bd-405b-ac42-38a710f7a713\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970725 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content\") pod \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\" (UID: \"52ca26dd-2938-4cf4-b812-4a4bcb014d5b\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970752 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-utilities\") pod \"315507aa-1e32-4360-b5b0-aa3625a10b0b\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970852 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-catalog-content\") pod \"3179d84c-16bd-405b-ac42-38a710f7a713\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970884 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-utilities\") pod \"3179d84c-16bd-405b-ac42-38a710f7a713\" (UID: \"3179d84c-16bd-405b-ac42-38a710f7a713\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.970924 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhlnt\" (UniqueName: \"kubernetes.io/projected/315507aa-1e32-4360-b5b0-aa3625a10b0b-kube-api-access-qhlnt\") pod \"315507aa-1e32-4360-b5b0-aa3625a10b0b\" (UID: \"315507aa-1e32-4360-b5b0-aa3625a10b0b\") " Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.972064 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-utilities" (OuterVolumeSpecName: "utilities") pod "3179d84c-16bd-405b-ac42-38a710f7a713" (UID: "3179d84c-16bd-405b-ac42-38a710f7a713"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.972970 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-utilities" (OuterVolumeSpecName: "utilities") pod "315507aa-1e32-4360-b5b0-aa3625a10b0b" (UID: "315507aa-1e32-4360-b5b0-aa3625a10b0b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.973997 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "10335cae-c54e-4bf1-b41c-6df530ac47dc" (UID: "10335cae-c54e-4bf1-b41c-6df530ac47dc"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.974077 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-utilities" (OuterVolumeSpecName: "utilities") pod "52ca26dd-2938-4cf4-b812-4a4bcb014d5b" (UID: "52ca26dd-2938-4cf4-b812-4a4bcb014d5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.992609 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-kube-api-access-tctzl" (OuterVolumeSpecName: "kube-api-access-tctzl") pod "52ca26dd-2938-4cf4-b812-4a4bcb014d5b" (UID: "52ca26dd-2938-4cf4-b812-4a4bcb014d5b"). InnerVolumeSpecName "kube-api-access-tctzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.992783 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3179d84c-16bd-405b-ac42-38a710f7a713-kube-api-access-498hr" (OuterVolumeSpecName: "kube-api-access-498hr") pod "3179d84c-16bd-405b-ac42-38a710f7a713" (UID: "3179d84c-16bd-405b-ac42-38a710f7a713"). InnerVolumeSpecName "kube-api-access-498hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.992860 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/315507aa-1e32-4360-b5b0-aa3625a10b0b-kube-api-access-qhlnt" (OuterVolumeSpecName: "kube-api-access-qhlnt") pod "315507aa-1e32-4360-b5b0-aa3625a10b0b" (UID: "315507aa-1e32-4360-b5b0-aa3625a10b0b"). InnerVolumeSpecName "kube-api-access-qhlnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.993156 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "10335cae-c54e-4bf1-b41c-6df530ac47dc" (UID: "10335cae-c54e-4bf1-b41c-6df530ac47dc"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.993363 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10335cae-c54e-4bf1-b41c-6df530ac47dc-kube-api-access-ljqxv" (OuterVolumeSpecName: "kube-api-access-ljqxv") pod "10335cae-c54e-4bf1-b41c-6df530ac47dc" (UID: "10335cae-c54e-4bf1-b41c-6df530ac47dc"). InnerVolumeSpecName "kube-api-access-ljqxv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:18 crc kubenswrapper[4721]: I0130 21:23:18.998014 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3179d84c-16bd-405b-ac42-38a710f7a713" (UID: "3179d84c-16bd-405b-ac42-38a710f7a713"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.037407 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "315507aa-1e32-4360-b5b0-aa3625a10b0b" (UID: "315507aa-1e32-4360-b5b0-aa3625a10b0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.073975 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljqxv\" (UniqueName: \"kubernetes.io/projected/10335cae-c54e-4bf1-b41c-6df530ac47dc-kube-api-access-ljqxv\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074035 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074055 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-498hr\" (UniqueName: \"kubernetes.io/projected/3179d84c-16bd-405b-ac42-38a710f7a713-kube-api-access-498hr\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074073 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074093 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074110 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3179d84c-16bd-405b-ac42-38a710f7a713-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074126 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhlnt\" (UniqueName: \"kubernetes.io/projected/315507aa-1e32-4360-b5b0-aa3625a10b0b-kube-api-access-qhlnt\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074145 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074164 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/10335cae-c54e-4bf1-b41c-6df530ac47dc-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074183 4721 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-tctzl\" (UniqueName: \"kubernetes.io/projected/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-kube-api-access-tctzl\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.074203 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315507aa-1e32-4360-b5b0-aa3625a10b0b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.120710 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52ca26dd-2938-4cf4-b812-4a4bcb014d5b" (UID: "52ca26dd-2938-4cf4-b812-4a4bcb014d5b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.175611 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52ca26dd-2938-4cf4-b812-4a4bcb014d5b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.215394 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szvpk"] Jan 30 21:23:19 crc kubenswrapper[4721]: W0130 21:23:19.229410 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5099f2b0_69e9_481d_8cb7_c70144258515.slice/crio-60decc451c2118dce94df84716c9e0837a2b0f432d19f6475cc5112e6cc3d886 WatchSource:0}: Error finding container 60decc451c2118dce94df84716c9e0837a2b0f432d19f6475cc5112e6cc3d886: Status 404 returned error can't find the container with id 60decc451c2118dce94df84716c9e0837a2b0f432d19f6475cc5112e6cc3d886 Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.552565 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.552554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-q9qsz" event={"ID":"10335cae-c54e-4bf1-b41c-6df530ac47dc","Type":"ContainerDied","Data":"72e0d63182060b674c4bff9078801338af160fd46676798be612bd9c1007a8e9"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.553607 4721 scope.go:117] "RemoveContainer" containerID="07b5fcb4f3070dcd172cf963310153e70380e4975d344383001dd6d28d5b580c" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.557635 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bq7" event={"ID":"3179d84c-16bd-405b-ac42-38a710f7a713","Type":"ContainerDied","Data":"2c1caf58d2eef99f9fc6e0e188aac3053f444f732c3d570bef2eb8086b71cd9f"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.557805 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bq7" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.566200 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" event={"ID":"5099f2b0-69e9-481d-8cb7-c70144258515","Type":"ContainerStarted","Data":"03c9af54a0e5c552f41d9a6633810d4c5a3402a92ee63ad795111ccffb2cb611"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.566352 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" event={"ID":"5099f2b0-69e9-481d-8cb7-c70144258515","Type":"ContainerStarted","Data":"60decc451c2118dce94df84716c9e0837a2b0f432d19f6475cc5112e6cc3d886"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.567089 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.568423 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-szvpk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.69:8080/healthz\": dial tcp 10.217.0.69:8080: connect: connection refused" start-of-body= Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.568477 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" podUID="5099f2b0-69e9-481d-8cb7-c70144258515" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.69:8080/healthz\": dial tcp 10.217.0.69:8080: connect: connection refused" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.572061 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsd7h" event={"ID":"d263bfe7-d31b-445d-933f-2e1bc58a8e26","Type":"ContainerDied","Data":"269c88f1a3a5e5ef043ed953eaebe60648d0dee9201b766f54afd895fb4f52f7"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.572095 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsd7h" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.582117 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mhm6" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.582186 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mhm6" event={"ID":"315507aa-1e32-4360-b5b0-aa3625a10b0b","Type":"ContainerDied","Data":"d13cbadd24d9d7eeab181772ec1a618abca3564d9e378a7b0ffdd52488393106"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.585013 4721 scope.go:117] "RemoveContainer" containerID="e0baf73e55a57753bcb6a13be9e1a3321fc2e9b85cd7a003b00b00dbf7de67cf" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.592603 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b48x7" event={"ID":"52ca26dd-2938-4cf4-b812-4a4bcb014d5b","Type":"ContainerDied","Data":"270d74ef20fba620f3cb07edbe23f3a5d0aa0947437aa68352f2b1b1a001a1f4"} Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.593156 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b48x7" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.609501 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" podStartSLOduration=1.6094321150000002 podStartE2EDuration="1.609432115s" podCreationTimestamp="2026-01-30 21:23:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:23:19.592962346 +0000 UTC m=+388.384863592" watchObservedRunningTime="2026-01-30 21:23:19.609432115 +0000 UTC m=+388.401333361" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.620268 4721 scope.go:117] "RemoveContainer" containerID="ce06b7dd514e72160aeb454a4b9f7708f6e8d995e3646acd3d0ddd59f602cd37" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.631781 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q9qsz"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.635678 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-q9qsz"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.644985 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bq7"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.659177 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bq7"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.659219 4721 scope.go:117] "RemoveContainer" containerID="5a50adaaf24492169432488a9ec267b73c5de21f5d95e005c336cfba5a132ccc" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.689647 4721 scope.go:117] "RemoveContainer" containerID="651a60a28f13d6c3891afca2f059e8bfbdb2c51794e41c9ade67dbaac35c9187" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.689865 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mhm6"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.696066 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9mhm6"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.703454 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gsd7h"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.711246 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gsd7h"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.711319 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b48x7"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.711331 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b48x7"] Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.715227 4721 scope.go:117] "RemoveContainer" containerID="2816fa78bf98e536f7497588f62518ae3265ee4761d51f043bd8e1c119a96029" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.736590 4721 scope.go:117] "RemoveContainer" containerID="16f98f16036e7786720e86e5ce770dc12843f8a68367d429379431c90461cfac" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.756803 4721 scope.go:117] "RemoveContainer" containerID="e3550519af866aad0d1314812f525603aca8d79e5eae408c426efbaeaf299ee1" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.773816 
4721 scope.go:117] "RemoveContainer" containerID="69a7e4800e1c5ff03b1a0ccc9acb6418e43ca9b0be6ce78868b813eff7c9dee3" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.801272 4721 scope.go:117] "RemoveContainer" containerID="df1458580e6c0d0c5af90a86d35bbca3c9e9cad4049beb81a85d67c536d1ea8a" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.825854 4721 scope.go:117] "RemoveContainer" containerID="893a14f1e75ca387cfd9eefcd6a7c3253bd9c383b5f22dd831b3ba8d9af07f93" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.846690 4721 scope.go:117] "RemoveContainer" containerID="74517c4563f1526bf657769a76bb8ca3079b1bf133236cbd484e73a03c9e6683" Jan 30 21:23:19 crc kubenswrapper[4721]: I0130 21:23:19.873952 4721 scope.go:117] "RemoveContainer" containerID="f6d86458990fe2078e82327fdc54227fe9da2308dc9ebbe5b973ed40396a903e" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.104970 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" path="/var/lib/kubelet/pods/10335cae-c54e-4bf1-b41c-6df530ac47dc/volumes" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.107656 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" path="/var/lib/kubelet/pods/315507aa-1e32-4360-b5b0-aa3625a10b0b/volumes" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.108945 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" path="/var/lib/kubelet/pods/3179d84c-16bd-405b-ac42-38a710f7a713/volumes" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.111101 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" path="/var/lib/kubelet/pods/52ca26dd-2938-4cf4-b812-4a4bcb014d5b/volumes" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.112555 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" path="/var/lib/kubelet/pods/d263bfe7-d31b-445d-933f-2e1bc58a8e26/volumes" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.432711 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-crqs7"] Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433094 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433113 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433123 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433132 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433148 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433156 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433168 4721 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433176 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433185 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433196 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433212 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433221 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433238 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433246 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433256 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433264 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433273 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433281 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="extract-utilities" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433290 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433319 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433334 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433343 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="extract-content" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433353 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433361 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433373 
4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433381 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433499 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3179d84c-16bd-405b-ac42-38a710f7a713" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433516 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="315507aa-1e32-4360-b5b0-aa3625a10b0b" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433525 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433538 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d263bfe7-d31b-445d-933f-2e1bc58a8e26" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433554 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433564 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="52ca26dd-2938-4cf4-b812-4a4bcb014d5b" containerName="registry-server" Jan 30 21:23:20 crc kubenswrapper[4721]: E0130 21:23:20.433678 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.433688 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="10335cae-c54e-4bf1-b41c-6df530ac47dc" containerName="marketplace-operator" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.434720 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.437354 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.443132 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-crqs7"] Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.503889 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt6z6\" (UniqueName: \"kubernetes.io/projected/b239aaa0-3dcf-4562-82ef-efe1163a2808-kube-api-access-dt6z6\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.504237 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-utilities\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.504321 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-catalog-content\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.605863 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt6z6\" (UniqueName: \"kubernetes.io/projected/b239aaa0-3dcf-4562-82ef-efe1163a2808-kube-api-access-dt6z6\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.605981 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-utilities\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.606027 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-catalog-content\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.606929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-catalog-content\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.607043 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-utilities\") pod \"redhat-marketplace-crqs7\" (UID: 
\"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.628937 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8szh7"] Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.630553 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.634024 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.635936 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.638805 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt6z6\" (UniqueName: \"kubernetes.io/projected/b239aaa0-3dcf-4562-82ef-efe1163a2808-kube-api-access-dt6z6\") pod \"redhat-marketplace-crqs7\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.648472 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8szh7"] Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.707935 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-catalog-content\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.708059 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-utilities\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.708110 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqhqr\" (UniqueName: \"kubernetes.io/projected/98c82411-35ff-4d85-9ddf-c65a5454ec6b-kube-api-access-sqhqr\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.765441 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.810059 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-catalog-content\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.810134 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-utilities\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.810177 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqhqr\" (UniqueName: \"kubernetes.io/projected/98c82411-35ff-4d85-9ddf-c65a5454ec6b-kube-api-access-sqhqr\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.812546 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-utilities\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.812693 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-catalog-content\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.831228 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqhqr\" (UniqueName: \"kubernetes.io/projected/98c82411-35ff-4d85-9ddf-c65a5454ec6b-kube-api-access-sqhqr\") pod \"redhat-operators-8szh7\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.983557 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-crqs7"] Jan 30 21:23:20 crc kubenswrapper[4721]: I0130 21:23:20.992831 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:20 crc kubenswrapper[4721]: W0130 21:23:20.994173 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb239aaa0_3dcf_4562_82ef_efe1163a2808.slice/crio-4a194d87584bccd4c41abf550d5d52decba0527448a6791c3bd6bd9c1f0b1dc5 WatchSource:0}: Error finding container 4a194d87584bccd4c41abf550d5d52decba0527448a6791c3bd6bd9c1f0b1dc5: Status 404 returned error can't find the container with id 4a194d87584bccd4c41abf550d5d52decba0527448a6791c3bd6bd9c1f0b1dc5 Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.400199 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8szh7"] Jan 30 21:23:21 crc kubenswrapper[4721]: W0130 21:23:21.412592 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98c82411_35ff_4d85_9ddf_c65a5454ec6b.slice/crio-304d68bbcbbccb9a4cb916fb432ae8609ecd0e312f483ec60e561040f4db8d8c WatchSource:0}: Error finding container 304d68bbcbbccb9a4cb916fb432ae8609ecd0e312f483ec60e561040f4db8d8c: Status 404 returned error can't find the container with id 304d68bbcbbccb9a4cb916fb432ae8609ecd0e312f483ec60e561040f4db8d8c Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.635826 4721 generic.go:334] "Generic (PLEG): container finished" podID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerID="ce0e66579bcad7b9dfb0e40baa037f1af715286594250b46f7f449bc5a25b0b2" exitCode=0 Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.635901 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerDied","Data":"ce0e66579bcad7b9dfb0e40baa037f1af715286594250b46f7f449bc5a25b0b2"} Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.635934 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerStarted","Data":"4a194d87584bccd4c41abf550d5d52decba0527448a6791c3bd6bd9c1f0b1dc5"} Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.639133 4721 generic.go:334] "Generic (PLEG): container finished" podID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerID="3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2" exitCode=0 Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.639208 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerDied","Data":"3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2"} Jan 30 21:23:21 crc kubenswrapper[4721]: I0130 21:23:21.639292 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerStarted","Data":"304d68bbcbbccb9a4cb916fb432ae8609ecd0e312f483ec60e561040f4db8d8c"} Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.645982 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerStarted","Data":"0acb986570e5f591297f9da683794272f26a9392005a1eab30571a212d8dfbcb"} Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.650347 4721 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerStarted","Data":"3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992"} Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.824562 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8vkbb"] Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.826400 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.829555 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.839167 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8vkbb"] Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.940996 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-utilities\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.941076 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68jcj\" (UniqueName: \"kubernetes.io/projected/0401e150-6ab2-4094-8523-472d73c16449-kube-api-access-68jcj\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:22 crc kubenswrapper[4721]: I0130 21:23:22.941189 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-catalog-content\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.025378 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h24b6"] Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.026767 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.029004 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.037264 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h24b6"] Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.042342 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-catalog-content\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.042398 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-utilities\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.042419 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68jcj\" (UniqueName: \"kubernetes.io/projected/0401e150-6ab2-4094-8523-472d73c16449-kube-api-access-68jcj\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.043040 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-catalog-content\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.043290 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-utilities\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.064348 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68jcj\" (UniqueName: \"kubernetes.io/projected/0401e150-6ab2-4094-8523-472d73c16449-kube-api-access-68jcj\") pod \"certified-operators-8vkbb\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.143995 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9g6s\" (UniqueName: \"kubernetes.io/projected/39cffd6b-fa7b-48fe-b3df-d312891d00d1-kube-api-access-t9g6s\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.144180 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-utilities\") pod \"community-operators-h24b6\" (UID: 
\"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.144267 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-catalog-content\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.188730 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.246687 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-utilities\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.247158 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-catalog-content\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.247252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9g6s\" (UniqueName: \"kubernetes.io/projected/39cffd6b-fa7b-48fe-b3df-d312891d00d1-kube-api-access-t9g6s\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.247670 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-utilities\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.248280 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-catalog-content\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.275841 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9g6s\" (UniqueName: \"kubernetes.io/projected/39cffd6b-fa7b-48fe-b3df-d312891d00d1-kube-api-access-t9g6s\") pod \"community-operators-h24b6\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.342581 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.640577 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8vkbb"] Jan 30 21:23:23 crc kubenswrapper[4721]: W0130 21:23:23.646886 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0401e150_6ab2_4094_8523_472d73c16449.slice/crio-2a9196ce11939834a510e71fcce5a2e50f1cfbcc12875b99cc95128928a7746c WatchSource:0}: Error finding container 2a9196ce11939834a510e71fcce5a2e50f1cfbcc12875b99cc95128928a7746c: Status 404 returned error can't find the container with id 2a9196ce11939834a510e71fcce5a2e50f1cfbcc12875b99cc95128928a7746c Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.657663 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerStarted","Data":"2a9196ce11939834a510e71fcce5a2e50f1cfbcc12875b99cc95128928a7746c"} Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.660238 4721 generic.go:334] "Generic (PLEG): container finished" podID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerID="3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992" exitCode=0 Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.660344 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerDied","Data":"3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992"} Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.662407 4721 generic.go:334] "Generic (PLEG): container finished" podID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerID="0acb986570e5f591297f9da683794272f26a9392005a1eab30571a212d8dfbcb" exitCode=0 Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.662494 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerDied","Data":"0acb986570e5f591297f9da683794272f26a9392005a1eab30571a212d8dfbcb"} Jan 30 21:23:23 crc kubenswrapper[4721]: I0130 21:23:23.758594 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h24b6"] Jan 30 21:23:23 crc kubenswrapper[4721]: W0130 21:23:23.765237 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39cffd6b_fa7b_48fe_b3df_d312891d00d1.slice/crio-459fd0384c5d77fcfe967715f023633dfd61a606b767a180e07f29a7b64fc670 WatchSource:0}: Error finding container 459fd0384c5d77fcfe967715f023633dfd61a606b767a180e07f29a7b64fc670: Status 404 returned error can't find the container with id 459fd0384c5d77fcfe967715f023633dfd61a606b767a180e07f29a7b64fc670 Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.671131 4721 generic.go:334] "Generic (PLEG): container finished" podID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerID="6fa3038d0e9d05a4acfadb13a3ece1b757faefaf8a03c23d0fec077f0df7d6c9" exitCode=0 Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.671225 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h24b6" event={"ID":"39cffd6b-fa7b-48fe-b3df-d312891d00d1","Type":"ContainerDied","Data":"6fa3038d0e9d05a4acfadb13a3ece1b757faefaf8a03c23d0fec077f0df7d6c9"} 
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.671957 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h24b6" event={"ID":"39cffd6b-fa7b-48fe-b3df-d312891d00d1","Type":"ContainerStarted","Data":"459fd0384c5d77fcfe967715f023633dfd61a606b767a180e07f29a7b64fc670"}
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.673669 4721 generic.go:334] "Generic (PLEG): container finished" podID="0401e150-6ab2-4094-8523-472d73c16449" containerID="894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4" exitCode=0
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.673720 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerDied","Data":"894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4"}
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.678892 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerStarted","Data":"22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe"}
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.683818 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerStarted","Data":"1100cb35367d56229f82220aedb1d30a28df760abce0daee5d11f4b368891d37"}
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.785255 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8szh7" podStartSLOduration=2.355498131 podStartE2EDuration="4.785224802s" podCreationTimestamp="2026-01-30 21:23:20 +0000 UTC" firstStartedPulling="2026-01-30 21:23:21.642246512 +0000 UTC m=+390.434147758" lastFinishedPulling="2026-01-30 21:23:24.071973183 +0000 UTC m=+392.863874429" observedRunningTime="2026-01-30 21:23:24.758351073 +0000 UTC m=+393.550252329" watchObservedRunningTime="2026-01-30 21:23:24.785224802 +0000 UTC m=+393.577126048"
Jan 30 21:23:24 crc kubenswrapper[4721]: I0130 21:23:24.786997 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-crqs7" podStartSLOduration=2.325304445 podStartE2EDuration="4.786990638s" podCreationTimestamp="2026-01-30 21:23:20 +0000 UTC" firstStartedPulling="2026-01-30 21:23:21.638276464 +0000 UTC m=+390.430177710" lastFinishedPulling="2026-01-30 21:23:24.099962617 +0000 UTC m=+392.891863903" observedRunningTime="2026-01-30 21:23:24.778738672 +0000 UTC m=+393.570639918" watchObservedRunningTime="2026-01-30 21:23:24.786990638 +0000 UTC m=+393.578891884"
Jan 30 21:23:25 crc kubenswrapper[4721]: I0130 21:23:25.694015 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerStarted","Data":"a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252"}
Jan 30 21:23:26 crc kubenswrapper[4721]: I0130 21:23:26.701645 4721 generic.go:334] "Generic (PLEG): container finished" podID="0401e150-6ab2-4094-8523-472d73c16449" containerID="a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252" exitCode=0
Jan 30 21:23:26 crc kubenswrapper[4721]: I0130 21:23:26.701703 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerDied","Data":"a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252"}
Jan 30 21:23:29 crc kubenswrapper[4721]: I0130 21:23:29.448947 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 21:23:29 crc kubenswrapper[4721]: I0130 21:23:29.449861 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.755235 4721 generic.go:334] "Generic (PLEG): container finished" podID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerID="31a09101e180896c63605c07ac5e6993361efa1f7a9837c0abde727a6a764532" exitCode=0
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.755357 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h24b6" event={"ID":"39cffd6b-fa7b-48fe-b3df-d312891d00d1","Type":"ContainerDied","Data":"31a09101e180896c63605c07ac5e6993361efa1f7a9837c0abde727a6a764532"}
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.766511 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-crqs7"
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.766756 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-crqs7"
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.772692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerStarted","Data":"6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886"}
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.807926 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8vkbb" podStartSLOduration=3.779246267 podStartE2EDuration="8.807893585s" podCreationTimestamp="2026-01-30 21:23:22 +0000 UTC" firstStartedPulling="2026-01-30 21:23:24.680267711 +0000 UTC m=+393.472168997" lastFinishedPulling="2026-01-30 21:23:29.708915069 +0000 UTC m=+398.500816315" observedRunningTime="2026-01-30 21:23:30.806346446 +0000 UTC m=+399.598247712" watchObservedRunningTime="2026-01-30 21:23:30.807893585 +0000 UTC m=+399.599794831"
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.835360 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-crqs7"
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.994538 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8szh7"
Jan 30 21:23:30 crc kubenswrapper[4721]: I0130 21:23:30.994987 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8szh7"
Jan 30 21:23:31 crc kubenswrapper[4721]: I0130 21:23:31.051341 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8szh7"
pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:31 crc kubenswrapper[4721]: I0130 21:23:31.788731 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h24b6" event={"ID":"39cffd6b-fa7b-48fe-b3df-d312891d00d1","Type":"ContainerStarted","Data":"fb1cea13519959ac21954e85737e98bc028dd045eceafbf562105892552eae6d"} Jan 30 21:23:31 crc kubenswrapper[4721]: I0130 21:23:31.818799 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h24b6" podStartSLOduration=2.291647508 podStartE2EDuration="8.818779807s" podCreationTimestamp="2026-01-30 21:23:23 +0000 UTC" firstStartedPulling="2026-01-30 21:23:24.673137111 +0000 UTC m=+393.465038397" lastFinishedPulling="2026-01-30 21:23:31.20026945 +0000 UTC m=+399.992170696" observedRunningTime="2026-01-30 21:23:31.815813142 +0000 UTC m=+400.607714408" watchObservedRunningTime="2026-01-30 21:23:31.818779807 +0000 UTC m=+400.610681053" Jan 30 21:23:31 crc kubenswrapper[4721]: I0130 21:23:31.856632 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:23:31 crc kubenswrapper[4721]: I0130 21:23:31.883138 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:23:33 crc kubenswrapper[4721]: I0130 21:23:33.190872 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:33 crc kubenswrapper[4721]: I0130 21:23:33.191795 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:33 crc kubenswrapper[4721]: I0130 21:23:33.233674 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:33 crc kubenswrapper[4721]: I0130 21:23:33.343622 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:33 crc kubenswrapper[4721]: I0130 21:23:33.343695 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:33 crc kubenswrapper[4721]: I0130 21:23:33.400776 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:34 crc kubenswrapper[4721]: I0130 21:23:34.896667 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:23:42 crc kubenswrapper[4721]: I0130 21:23:42.957533 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" podUID="b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" containerName="registry" containerID="cri-o://b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d" gracePeriod=30 Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.392841 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.422333 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.500635 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-bound-sa-token\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.500737 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-certificates\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.501111 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.501157 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-installation-pull-secrets\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.501192 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-tls\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.501238 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdbnn\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-kube-api-access-vdbnn\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.501314 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-ca-trust-extracted\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.501336 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-trusted-ca\") pod \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\" (UID: \"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70\") " Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.502267 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.502678 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.503083 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.508086 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-kube-api-access-vdbnn" (OuterVolumeSpecName: "kube-api-access-vdbnn") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "kube-api-access-vdbnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.508429 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.508593 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.513281 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.528031 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.533813 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" (UID: "b9ef3d5f-27fb-41ce-a6a3-ccd017690b70"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.604781 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.604866 4721 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.604895 4721 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.604914 4721 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.604936 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdbnn\" (UniqueName: \"kubernetes.io/projected/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-kube-api-access-vdbnn\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.604960 4721 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.872915 4721 generic.go:334] "Generic (PLEG): container finished" podID="b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" containerID="b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d" exitCode=0 Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.873043 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" event={"ID":"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70","Type":"ContainerDied","Data":"b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d"} Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.873138 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" event={"ID":"b9ef3d5f-27fb-41ce-a6a3-ccd017690b70","Type":"ContainerDied","Data":"e8c125e72a60e01090d3f9dbae2164e53d4bf91be52172761e04f592d3fa20d0"} Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.873221 4721 scope.go:117] "RemoveContainer" containerID="b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.873681 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-cm92z" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.912767 4721 scope.go:117] "RemoveContainer" containerID="b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d" Jan 30 21:23:43 crc kubenswrapper[4721]: E0130 21:23:43.913547 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d\": container with ID starting with b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d not found: ID does not exist" containerID="b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.913605 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d"} err="failed to get container status \"b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d\": rpc error: code = NotFound desc = could not find container \"b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d\": container with ID starting with b73086c439bdedda9f7e81d7d0102fac3fc1a1a904d36f5cce92e7d6374d207d not found: ID does not exist" Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.931107 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cm92z"] Jan 30 21:23:43 crc kubenswrapper[4721]: I0130 21:23:43.935721 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-cm92z"] Jan 30 21:23:44 crc kubenswrapper[4721]: I0130 21:23:44.105665 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" path="/var/lib/kubelet/pods/b9ef3d5f-27fb-41ce-a6a3-ccd017690b70/volumes" Jan 30 21:23:59 crc kubenswrapper[4721]: I0130 21:23:59.448327 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:23:59 crc kubenswrapper[4721]: I0130 21:23:59.449098 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:23:59 crc kubenswrapper[4721]: I0130 21:23:59.449167 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:23:59 crc kubenswrapper[4721]: I0130 21:23:59.450156 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"22a9d3c345d244973800e77dbe9cd0a1ed3e1b5e0c717d90032aeb0efb193236"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:23:59 crc kubenswrapper[4721]: I0130 21:23:59.450268 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" 
podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://22a9d3c345d244973800e77dbe9cd0a1ed3e1b5e0c717d90032aeb0efb193236" gracePeriod=600 Jan 30 21:24:00 crc kubenswrapper[4721]: I0130 21:24:00.007702 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="22a9d3c345d244973800e77dbe9cd0a1ed3e1b5e0c717d90032aeb0efb193236" exitCode=0 Jan 30 21:24:00 crc kubenswrapper[4721]: I0130 21:24:00.007823 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"22a9d3c345d244973800e77dbe9cd0a1ed3e1b5e0c717d90032aeb0efb193236"} Jan 30 21:24:00 crc kubenswrapper[4721]: I0130 21:24:00.008215 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"d7ee160d63cdd492388107118b59475b29b882ae5e61afaaca4166b1fbfadf4b"} Jan 30 21:24:00 crc kubenswrapper[4721]: I0130 21:24:00.008278 4721 scope.go:117] "RemoveContainer" containerID="f9296a411f1ec3d05262aefad0e39d86bff5a87b99cfc0d14ecbb867d83dc043" Jan 30 21:25:59 crc kubenswrapper[4721]: I0130 21:25:59.449515 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:25:59 crc kubenswrapper[4721]: I0130 21:25:59.450557 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:26:29 crc kubenswrapper[4721]: I0130 21:26:29.449144 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:26:29 crc kubenswrapper[4721]: I0130 21:26:29.450145 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:26:59 crc kubenswrapper[4721]: I0130 21:26:59.449089 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:26:59 crc kubenswrapper[4721]: I0130 21:26:59.450425 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:26:59 
crc kubenswrapper[4721]: I0130 21:26:59.450528 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:26:59 crc kubenswrapper[4721]: I0130 21:26:59.451854 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d7ee160d63cdd492388107118b59475b29b882ae5e61afaaca4166b1fbfadf4b"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:26:59 crc kubenswrapper[4721]: I0130 21:26:59.451983 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://d7ee160d63cdd492388107118b59475b29b882ae5e61afaaca4166b1fbfadf4b" gracePeriod=600 Jan 30 21:27:00 crc kubenswrapper[4721]: I0130 21:27:00.495152 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="d7ee160d63cdd492388107118b59475b29b882ae5e61afaaca4166b1fbfadf4b" exitCode=0 Jan 30 21:27:00 crc kubenswrapper[4721]: I0130 21:27:00.495281 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"d7ee160d63cdd492388107118b59475b29b882ae5e61afaaca4166b1fbfadf4b"} Jan 30 21:27:00 crc kubenswrapper[4721]: I0130 21:27:00.496424 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"02acc2ce27e27177088d6c3748fc2d939b3d52222280b1e9d41a45b1ef083f4a"} Jan 30 21:27:00 crc kubenswrapper[4721]: I0130 21:27:00.496504 4721 scope.go:117] "RemoveContainer" containerID="22a9d3c345d244973800e77dbe9cd0a1ed3e1b5e0c717d90032aeb0efb193236" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.059205 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg"] Jan 30 21:28:15 crc kubenswrapper[4721]: E0130 21:28:15.060553 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" containerName="registry" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.060574 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" containerName="registry" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.060703 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ef3d5f-27fb-41ce-a6a3-ccd017690b70" containerName="registry" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.061720 4721 util.go:30] "No sandbox for pod can be found. 
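machine-config-daemon's liveness probe fails at 21:25:59, 21:26:29, and 21:26:59, thirty seconds apart, and only the third consecutive failure triggers the kill-and-restart. That is consistent with a consecutive-failure threshold of 3 and a 30s period, though neither value is visible in the log itself; the sketch below models the mechanism under that assumption:

package main

import "fmt"

type livenessTracker struct {
	consecutiveFailures, failureThreshold int
}

// observe records one probe result and restarts the container once the
// failure streak reaches the threshold (assumed to be 3 here).
func (t *livenessTracker) observe(success bool) {
	if success {
		t.consecutiveFailures = 0
		return
	}
	t.consecutiveFailures++
	fmt.Println(`probeResult="failure"`)
	if t.consecutiveFailures >= t.failureThreshold {
		fmt.Println("Container failed liveness probe, will be restarted")
		t.consecutiveFailures = 0
	}
}

func main() {
	t := &livenessTracker{failureThreshold: 3}
	for _, ok := range []bool{false, false, false} { // 21:25:59, 21:26:29, 21:26:59
		t.observe(ok)
	}
}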
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.066634 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.096462 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg"] Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.237941 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.238086 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxwkt\" (UniqueName: \"kubernetes.io/projected/820b6616-f2c6-47f9-8ad6-0e196f9cd134-kube-api-access-fxwkt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.238139 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.339519 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.339630 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxwkt\" (UniqueName: \"kubernetes.io/projected/820b6616-f2c6-47f9-8ad6-0e196f9cd134-kube-api-access-fxwkt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.339685 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.340352 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.340572 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.361989 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxwkt\" (UniqueName: \"kubernetes.io/projected/820b6616-f2c6-47f9-8ad6-0e196f9cd134-kube-api-access-fxwkt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.389646 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:15 crc kubenswrapper[4721]: I0130 21:28:15.612214 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg"] Jan 30 21:28:16 crc kubenswrapper[4721]: I0130 21:28:16.133867 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" event={"ID":"820b6616-f2c6-47f9-8ad6-0e196f9cd134","Type":"ContainerStarted","Data":"0e0e838cf0eaed40994aa7c1389c82521ceb8da3027186bdb6688a96813ba776"} Jan 30 21:28:16 crc kubenswrapper[4721]: I0130 21:28:16.134449 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" event={"ID":"820b6616-f2c6-47f9-8ad6-0e196f9cd134","Type":"ContainerStarted","Data":"b57b076f45347dcb757d26f7bc66b06be7a71ee6bbfd6a3a94480f7ec4b16809"} Jan 30 21:28:17 crc kubenswrapper[4721]: I0130 21:28:17.142630 4721 generic.go:334] "Generic (PLEG): container finished" podID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerID="0e0e838cf0eaed40994aa7c1389c82521ceb8da3027186bdb6688a96813ba776" exitCode=0 Jan 30 21:28:17 crc kubenswrapper[4721]: I0130 21:28:17.142689 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" event={"ID":"820b6616-f2c6-47f9-8ad6-0e196f9cd134","Type":"ContainerDied","Data":"0e0e838cf0eaed40994aa7c1389c82521ceb8da3027186bdb6688a96813ba776"} Jan 30 21:28:17 crc kubenswrapper[4721]: I0130 21:28:17.145319 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 21:28:19 crc kubenswrapper[4721]: I0130 21:28:19.167943 4721 generic.go:334] "Generic (PLEG): container finished" podID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerID="9829b84a2ecf22682a5e30a82780d12fc53018c0b4e3588c51b13b958164e090" exitCode=0 Jan 30 21:28:19 crc kubenswrapper[4721]: I0130 21:28:19.168163 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" event={"ID":"820b6616-f2c6-47f9-8ad6-0e196f9cd134","Type":"ContainerDied","Data":"9829b84a2ecf22682a5e30a82780d12fc53018c0b4e3588c51b13b958164e090"} Jan 30 21:28:20 crc kubenswrapper[4721]: I0130 21:28:20.188908 4721 generic.go:334] "Generic (PLEG): container finished" podID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerID="b127f7d52c9d7b8b5e592bb285798253b6f5772559a5e472bd9319118dd955ff" exitCode=0 Jan 30 21:28:20 crc kubenswrapper[4721]: I0130 21:28:20.189066 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" event={"ID":"820b6616-f2c6-47f9-8ad6-0e196f9cd134","Type":"ContainerDied","Data":"b127f7d52c9d7b8b5e592bb285798253b6f5772559a5e472bd9319118dd955ff"} Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.522311 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.643803 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxwkt\" (UniqueName: \"kubernetes.io/projected/820b6616-f2c6-47f9-8ad6-0e196f9cd134-kube-api-access-fxwkt\") pod \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.643905 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-bundle\") pod \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.643937 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-util\") pod \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\" (UID: \"820b6616-f2c6-47f9-8ad6-0e196f9cd134\") " Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.648104 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-bundle" (OuterVolumeSpecName: "bundle") pod "820b6616-f2c6-47f9-8ad6-0e196f9cd134" (UID: "820b6616-f2c6-47f9-8ad6-0e196f9cd134"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.652287 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/820b6616-f2c6-47f9-8ad6-0e196f9cd134-kube-api-access-fxwkt" (OuterVolumeSpecName: "kube-api-access-fxwkt") pod "820b6616-f2c6-47f9-8ad6-0e196f9cd134" (UID: "820b6616-f2c6-47f9-8ad6-0e196f9cd134"). InnerVolumeSpecName "kube-api-access-fxwkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.655197 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-util" (OuterVolumeSpecName: "util") pod "820b6616-f2c6-47f9-8ad6-0e196f9cd134" (UID: "820b6616-f2c6-47f9-8ad6-0e196f9cd134"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.746609 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxwkt\" (UniqueName: \"kubernetes.io/projected/820b6616-f2c6-47f9-8ad6-0e196f9cd134-kube-api-access-fxwkt\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.746689 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:21 crc kubenswrapper[4721]: I0130 21:28:21.746719 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/820b6616-f2c6-47f9-8ad6-0e196f9cd134-util\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:22 crc kubenswrapper[4721]: I0130 21:28:22.208370 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" event={"ID":"820b6616-f2c6-47f9-8ad6-0e196f9cd134","Type":"ContainerDied","Data":"b57b076f45347dcb757d26f7bc66b06be7a71ee6bbfd6a3a94480f7ec4b16809"} Jan 30 21:28:22 crc kubenswrapper[4721]: I0130 21:28:22.208446 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b57b076f45347dcb757d26f7bc66b06be7a71ee6bbfd6a3a94480f7ec4b16809" Jan 30 21:28:22 crc kubenswrapper[4721]: I0130 21:28:22.208485 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.719840 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h"] Jan 30 21:28:30 crc kubenswrapper[4721]: E0130 21:28:30.721072 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="pull" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.721092 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="pull" Jan 30 21:28:30 crc kubenswrapper[4721]: E0130 21:28:30.721123 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="util" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.721132 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="util" Jan 30 21:28:30 crc kubenswrapper[4721]: E0130 21:28:30.721143 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="extract" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.721156 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="extract" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.721379 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" containerName="extract" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.721999 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.724782 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.725069 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.725273 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-hpxr6" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.738585 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.770559 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.772012 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.782811 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.787496 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-xkbt6" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.789397 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.790479 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.791159 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqstc\" (UniqueName: \"kubernetes.io/projected/64ed6731-ff9c-4a61-b696-00dcac24cb8d-kube-api-access-wqstc\") pod \"obo-prometheus-operator-68bc856cb9-lxm2h\" (UID: \"64ed6731-ff9c-4a61-b696-00dcac24cb8d\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.808078 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.829382 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.892467 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c65d4e6-4f21-4298-a63e-7390c6588e3f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-98r68\" (UID: \"5c65d4e6-4f21-4298-a63e-7390c6588e3f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.892967 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90e03b69-5f4f-469a-a8a0-82bc942a47e7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-25fb7\" (UID: \"90e03b69-5f4f-469a-a8a0-82bc942a47e7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.893083 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90e03b69-5f4f-469a-a8a0-82bc942a47e7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-25fb7\" (UID: \"90e03b69-5f4f-469a-a8a0-82bc942a47e7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.893135 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqstc\" (UniqueName: \"kubernetes.io/projected/64ed6731-ff9c-4a61-b696-00dcac24cb8d-kube-api-access-wqstc\") pod \"obo-prometheus-operator-68bc856cb9-lxm2h\" (UID: \"64ed6731-ff9c-4a61-b696-00dcac24cb8d\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.893278 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c65d4e6-4f21-4298-a63e-7390c6588e3f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-98r68\" (UID: \"5c65d4e6-4f21-4298-a63e-7390c6588e3f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.926993 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqstc\" (UniqueName: 
\"kubernetes.io/projected/64ed6731-ff9c-4a61-b696-00dcac24cb8d-kube-api-access-wqstc\") pod \"obo-prometheus-operator-68bc856cb9-lxm2h\" (UID: \"64ed6731-ff9c-4a61-b696-00dcac24cb8d\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.940943 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-rw7xg"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.942065 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.944854 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.945018 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-4fms4" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.957254 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-rw7xg"] Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.995447 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c65d4e6-4f21-4298-a63e-7390c6588e3f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-98r68\" (UID: \"5c65d4e6-4f21-4298-a63e-7390c6588e3f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.995519 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c65d4e6-4f21-4298-a63e-7390c6588e3f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-98r68\" (UID: \"5c65d4e6-4f21-4298-a63e-7390c6588e3f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.995582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90e03b69-5f4f-469a-a8a0-82bc942a47e7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-25fb7\" (UID: \"90e03b69-5f4f-469a-a8a0-82bc942a47e7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.995620 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7929\" (UniqueName: \"kubernetes.io/projected/6007c115-e448-4886-9aa2-14a72217c0bd-kube-api-access-v7929\") pod \"observability-operator-59bdc8b94-rw7xg\" (UID: \"6007c115-e448-4886-9aa2-14a72217c0bd\") " pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.995677 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90e03b69-5f4f-469a-a8a0-82bc942a47e7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-25fb7\" (UID: \"90e03b69-5f4f-469a-a8a0-82bc942a47e7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:30 crc kubenswrapper[4721]: I0130 21:28:30.995705 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6007c115-e448-4886-9aa2-14a72217c0bd-observability-operator-tls\") pod \"observability-operator-59bdc8b94-rw7xg\" (UID: \"6007c115-e448-4886-9aa2-14a72217c0bd\") " pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.009485 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c65d4e6-4f21-4298-a63e-7390c6588e3f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-98r68\" (UID: \"5c65d4e6-4f21-4298-a63e-7390c6588e3f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.015956 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90e03b69-5f4f-469a-a8a0-82bc942a47e7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-25fb7\" (UID: \"90e03b69-5f4f-469a-a8a0-82bc942a47e7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.022913 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90e03b69-5f4f-469a-a8a0-82bc942a47e7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-25fb7\" (UID: \"90e03b69-5f4f-469a-a8a0-82bc942a47e7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.023933 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c65d4e6-4f21-4298-a63e-7390c6588e3f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79f76f558b-98r68\" (UID: \"5c65d4e6-4f21-4298-a63e-7390c6588e3f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.047691 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.097357 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6007c115-e448-4886-9aa2-14a72217c0bd-observability-operator-tls\") pod \"observability-operator-59bdc8b94-rw7xg\" (UID: \"6007c115-e448-4886-9aa2-14a72217c0bd\") " pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.097486 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7929\" (UniqueName: \"kubernetes.io/projected/6007c115-e448-4886-9aa2-14a72217c0bd-kube-api-access-v7929\") pod \"observability-operator-59bdc8b94-rw7xg\" (UID: \"6007c115-e448-4886-9aa2-14a72217c0bd\") " pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.107756 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.109574 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6007c115-e448-4886-9aa2-14a72217c0bd-observability-operator-tls\") pod \"observability-operator-59bdc8b94-rw7xg\" (UID: \"6007c115-e448-4886-9aa2-14a72217c0bd\") " pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.126928 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.136655 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7929\" (UniqueName: \"kubernetes.io/projected/6007c115-e448-4886-9aa2-14a72217c0bd-kube-api-access-v7929\") pod \"observability-operator-59bdc8b94-rw7xg\" (UID: \"6007c115-e448-4886-9aa2-14a72217c0bd\") " pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.189141 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-hnrqb"] Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.190542 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.193804 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-dhkx4" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.198828 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7ea1c63-c2af-4258-9864-8e09c708d507-openshift-service-ca\") pod \"perses-operator-5bf474d74f-hnrqb\" (UID: \"e7ea1c63-c2af-4258-9864-8e09c708d507\") " pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.199038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2srx\" (UniqueName: \"kubernetes.io/projected/e7ea1c63-c2af-4258-9864-8e09c708d507-kube-api-access-s2srx\") pod \"perses-operator-5bf474d74f-hnrqb\" (UID: \"e7ea1c63-c2af-4258-9864-8e09c708d507\") " pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.205875 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-hnrqb"] Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.260905 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.299860 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7ea1c63-c2af-4258-9864-8e09c708d507-openshift-service-ca\") pod \"perses-operator-5bf474d74f-hnrqb\" (UID: \"e7ea1c63-c2af-4258-9864-8e09c708d507\") " pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.299943 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2srx\" (UniqueName: \"kubernetes.io/projected/e7ea1c63-c2af-4258-9864-8e09c708d507-kube-api-access-s2srx\") pod \"perses-operator-5bf474d74f-hnrqb\" (UID: \"e7ea1c63-c2af-4258-9864-8e09c708d507\") " pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.301337 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e7ea1c63-c2af-4258-9864-8e09c708d507-openshift-service-ca\") pod \"perses-operator-5bf474d74f-hnrqb\" (UID: \"e7ea1c63-c2af-4258-9864-8e09c708d507\") " pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.326122 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2srx\" (UniqueName: \"kubernetes.io/projected/e7ea1c63-c2af-4258-9864-8e09c708d507-kube-api-access-s2srx\") pod \"perses-operator-5bf474d74f-hnrqb\" (UID: \"e7ea1c63-c2af-4258-9864-8e09c708d507\") " pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.531227 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.563684 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68"] Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.732709 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-rw7xg"] Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.812835 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7"] Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.833464 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-hnrqb"] Jan 30 21:28:31 crc kubenswrapper[4721]: I0130 21:28:31.844833 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h"] Jan 30 21:28:32 crc kubenswrapper[4721]: I0130 21:28:32.296084 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" event={"ID":"5c65d4e6-4f21-4298-a63e-7390c6588e3f","Type":"ContainerStarted","Data":"7d92ee41b1fc70f9074ce5f1373340f5eef0f6c46b9cace47e9a78a7c7870c57"} Jan 30 21:28:32 crc kubenswrapper[4721]: I0130 21:28:32.297542 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" event={"ID":"90e03b69-5f4f-469a-a8a0-82bc942a47e7","Type":"ContainerStarted","Data":"f01e818fb8b9e7f2f6f57e452cbfe85e0a1ac70ea6fcb92eee0f54c28fa2ebdf"} Jan 30 21:28:32 crc kubenswrapper[4721]: I0130 21:28:32.299193 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" event={"ID":"e7ea1c63-c2af-4258-9864-8e09c708d507","Type":"ContainerStarted","Data":"b7b113ae86b1be403c9e1eee2b827a5b1d07f373b41abebd54b5c6b62113151d"} Jan 30 21:28:32 crc kubenswrapper[4721]: I0130 21:28:32.300461 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" event={"ID":"64ed6731-ff9c-4a61-b696-00dcac24cb8d","Type":"ContainerStarted","Data":"51908f7cae3b6631141fe5da1201675b7c439ef9ee726b211e7a0d718f95bc14"} Jan 30 21:28:32 crc kubenswrapper[4721]: I0130 21:28:32.301739 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" event={"ID":"6007c115-e448-4886-9aa2-14a72217c0bd","Type":"ContainerStarted","Data":"d401d2d360ed9723066486166ad52fcdeb3582fe8293d0e35048c6102e9810d9"} Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.627688 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2p5n5"] Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630267 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-controller" containerID="cri-o://a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630437 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" 
containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630506 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-acl-logging" containerID="cri-o://dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630628 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-node" containerID="cri-o://17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630657 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="sbdb" containerID="cri-o://08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630508 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="northd" containerID="cri-o://6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.630337 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="nbdb" containerID="cri-o://24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf" gracePeriod=30 Jan 30 21:28:43 crc kubenswrapper[4721]: I0130 21:28:43.735509 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" containerID="cri-o://2d6a1248435f7b74f1093fccf453e5e79a2b66fb79882ee43d359ff897c280e2" gracePeriod=30 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.425107 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/2.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.425823 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/1.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.425879 4721 generic.go:334] "Generic (PLEG): container finished" podID="62d4c2ec-791a-4f32-8ba0-118cac4e72e5" containerID="b7bb92494e4fef088d7d6741d3a0314fed401e904d1675f21988157c35a6a12c" exitCode=2 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.425951 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerDied","Data":"b7bb92494e4fef088d7d6741d3a0314fed401e904d1675f21988157c35a6a12c"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.426000 4721 scope.go:117] "RemoveContainer" containerID="0a225c83a5ba072bd73951a8721bb0362e531c2b020430d057dda100ac728f04" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 
21:28:44.426709 4721 scope.go:117] "RemoveContainer" containerID="b7bb92494e4fef088d7d6741d3a0314fed401e904d1675f21988157c35a6a12c" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.426913 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-g7fgc_openshift-multus(62d4c2ec-791a-4f32-8ba0-118cac4e72e5)\"" pod="openshift-multus/multus-g7fgc" podUID="62d4c2ec-791a-4f32-8ba0-118cac4e72e5" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.433788 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovnkube-controller/3.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.436289 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovn-acl-logging/0.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.436795 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovn-controller/0.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437192 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="2d6a1248435f7b74f1093fccf453e5e79a2b66fb79882ee43d359ff897c280e2" exitCode=0 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437222 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a" exitCode=0 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437232 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf" exitCode=0 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437242 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7" exitCode=0 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437251 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c" exitCode=0 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437259 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323" exitCode=0 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437267 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d" exitCode=143 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437274 4721 generic.go:334] "Generic (PLEG): container finished" podID="f295c622-6366-498b-b846-24316b3ad5b7" containerID="a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5" exitCode=143 Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437273 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" 
event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"2d6a1248435f7b74f1093fccf453e5e79a2b66fb79882ee43d359ff897c280e2"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437354 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437395 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437408 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437422 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437435 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437446 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.437462 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5"} Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.672421 4721 scope.go:117] "RemoveContainer" containerID="89218ce1268f48aa91729c007754164bb15a48a63e0bd16d216c12213dec48f4" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.723865 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovn-acl-logging/0.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.731206 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovn-controller/0.log" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.731977 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863460 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-ovn\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863519 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-systemd\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863541 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-bin\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863571 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9mr8\" (UniqueName: \"kubernetes.io/projected/f295c622-6366-498b-b846-24316b3ad5b7-kube-api-access-q9mr8\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863589 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-slash\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863612 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863636 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-env-overrides\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863655 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-node-log\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863671 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-log-socket\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863702 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-kubelet\") pod 
\"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863722 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-var-lib-openvswitch\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863736 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-netns\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863755 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-systemd-units\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863776 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-config\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863796 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-etc-openvswitch\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863814 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-netd\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863835 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-openvswitch\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863853 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-script-lib\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863885 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f295c622-6366-498b-b846-24316b3ad5b7-ovn-node-metrics-cert\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.863909 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-ovn-kubernetes\") pod \"f295c622-6366-498b-b846-24316b3ad5b7\" (UID: \"f295c622-6366-498b-b846-24316b3ad5b7\") " Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.864203 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.864240 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.869951 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870043 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870493 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870558 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870519 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870655 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-slash" (OuterVolumeSpecName: "host-slash") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870682 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.871005 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-node-log" (OuterVolumeSpecName: "node-log") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.870980 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-log-socket" (OuterVolumeSpecName: "log-socket") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.871073 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.871109 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.871131 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.871194 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). 
InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.903650 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.904575 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7wk86"] Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.904898 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-acl-logging" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.904919 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-acl-logging" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.904931 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.904939 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.904950 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="sbdb" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.904956 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="sbdb" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.904966 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-node" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.904972 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-node" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.904980 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="northd" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.904986 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="northd" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.904997 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905003 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.905010 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905018 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.905032 4721 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="nbdb" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905040 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="nbdb" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.905051 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905057 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.905065 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905072 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.905083 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kubecfg-setup" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905090 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kubecfg-setup" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.905100 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.905108 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.907642 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911492 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="northd" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911522 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911540 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911550 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="sbdb" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911557 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911566 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911574 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovn-acl-logging" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911583 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911592 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="kube-rbac-proxy-node" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911605 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="nbdb" Jan 30 21:28:44 crc kubenswrapper[4721]: E0130 21:28:44.911740 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911786 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911895 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.911905 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f295c622-6366-498b-b846-24316b3ad5b7" containerName="ovnkube-controller" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.913954 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.929590 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f295c622-6366-498b-b846-24316b3ad5b7-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.930499 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f295c622-6366-498b-b846-24316b3ad5b7-kube-api-access-q9mr8" (OuterVolumeSpecName: "kube-api-access-q9mr8") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "kube-api-access-q9mr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.946286 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f295c622-6366-498b-b846-24316b3ad5b7" (UID: "f295c622-6366-498b-b846-24316b3ad5b7"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965147 4721 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965183 4721 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965195 4721 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965204 4721 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965213 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965222 4721 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965231 4721 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965240 4721 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965249 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965257 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/f295c622-6366-498b-b846-24316b3ad5b7-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965268 4721 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965277 4721 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965285 4721 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965314 4721 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965323 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9mr8\" (UniqueName: \"kubernetes.io/projected/f295c622-6366-498b-b846-24316b3ad5b7-kube-api-access-q9mr8\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965332 4721 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-slash\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965340 4721 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965350 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f295c622-6366-498b-b846-24316b3ad5b7-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965360 4721 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-node-log\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:44 crc kubenswrapper[4721]: I0130 21:28:44.965368 4721 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f295c622-6366-498b-b846-24316b3ad5b7-log-socket\") on node \"crc\" DevicePath \"\"" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069170 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-slash\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069244 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn7d7\" (UniqueName: \"kubernetes.io/projected/c8e76699-472b-4945-b1b3-47f55c922ec9-kube-api-access-dn7d7\") pod \"ovnkube-node-7wk86\" (UID: 
\"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069276 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-kubelet\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069332 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-log-socket\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069363 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-cni-bin\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069387 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-ovn\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069411 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-ovnkube-script-lib\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069441 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069464 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-run-ovn-kubernetes\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069497 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-env-overrides\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069531 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-var-lib-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069562 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069593 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-systemd-units\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069617 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-cni-netd\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069661 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-run-netns\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069685 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-node-log\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069717 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-systemd\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069741 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-ovnkube-config\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069770 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-etc-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.069795 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c8e76699-472b-4945-b1b3-47f55c922ec9-ovn-node-metrics-cert\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170479 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-run-netns\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170531 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-node-log\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170552 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-systemd\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170571 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-ovnkube-config\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170588 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-etc-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170615 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c8e76699-472b-4945-b1b3-47f55c922ec9-ovn-node-metrics-cert\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170594 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-run-netns\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170660 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-slash\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170631 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: 
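
The attach-verification pass above covers all twenty of the replacement pod's volumes; the mount pass that follows walks the same list again, first as "MountVolume started" and then as "MountVolume.SetUp succeeded". A toy model of that two-phase ordering, purely illustrative rather than kubelet's actual volumemanager code:

// Toy two-phase volume reconciliation: verify attachment for every
// volume first, then mount whatever is attached. Illustrates only the
// ordering visible in the log; states and names are invented.
package main

import "fmt"

type volState int

const (
	pending volState = iota
	attached
	mounted
)

func main() {
	vols := map[string]volState{
		"host-slash":            pending,
		"ovnkube-config":        pending,
		"ovn-node-metrics-cert": pending,
	}
	for name := range vols { // phase 1: VerifyControllerAttachedVolume
		fmt.Println("VerifyControllerAttachedVolume started for volume", name)
		vols[name] = attached
	}
	for name, st := range vols { // phase 2: MountVolume.SetUp
		if st == attached {
			fmt.Println("MountVolume.SetUp succeeded for volume", name)
			vols[name] = mounted
		}
	}
}
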
\"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-systemd\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170637 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-slash\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170703 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-node-log\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn7d7\" (UniqueName: \"kubernetes.io/projected/c8e76699-472b-4945-b1b3-47f55c922ec9-kube-api-access-dn7d7\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170819 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-kubelet\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170846 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-log-socket\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170914 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-cni-bin\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170938 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-ovn\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.170960 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-ovnkube-script-lib\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171006 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: 
\"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171027 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-run-ovn-kubernetes\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-env-overrides\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171107 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-var-lib-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171160 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-systemd-units\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171207 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-cni-netd\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171277 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-kubelet\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171349 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171351 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-run-ovn-kubernetes\") pod \"ovnkube-node-7wk86\" (UID: 
\"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171389 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171420 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-systemd-units\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171440 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-cni-netd\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171460 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-log-socket\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-var-lib-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171518 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-ovnkube-config\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171575 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-run-ovn\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171525 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-host-cni-bin\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.171807 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-env-overrides\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 
21:28:45.172187 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c8e76699-472b-4945-b1b3-47f55c922ec9-ovnkube-script-lib\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.172213 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c8e76699-472b-4945-b1b3-47f55c922ec9-etc-openvswitch\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.176120 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c8e76699-472b-4945-b1b3-47f55c922ec9-ovn-node-metrics-cert\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.189533 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn7d7\" (UniqueName: \"kubernetes.io/projected/c8e76699-472b-4945-b1b3-47f55c922ec9-kube-api-access-dn7d7\") pod \"ovnkube-node-7wk86\" (UID: \"c8e76699-472b-4945-b1b3-47f55c922ec9\") " pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.282043 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:45 crc kubenswrapper[4721]: W0130 21:28:45.305844 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8e76699_472b_4945_b1b3_47f55c922ec9.slice/crio-ae490313674da84f13d4692726a174fbee32df0e2962978415f2dde594e12e73 WatchSource:0}: Error finding container ae490313674da84f13d4692726a174fbee32df0e2962978415f2dde594e12e73: Status 404 returned error can't find the container with id ae490313674da84f13d4692726a174fbee32df0e2962978415f2dde594e12e73 Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.448576 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" event={"ID":"5c65d4e6-4f21-4298-a63e-7390c6588e3f","Type":"ContainerStarted","Data":"5ea3e3505f5e6b47cbcf7b584996387853293d48bf2282fcb8bce3a3764bb2ae"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.459669 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" event={"ID":"e7ea1c63-c2af-4258-9864-8e09c708d507","Type":"ContainerStarted","Data":"4b5b21cf925f02df21b8d9de7ee97278eddea73ab0fb272e53967c7920f4f060"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.460572 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.464136 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" event={"ID":"90e03b69-5f4f-469a-a8a0-82bc942a47e7","Type":"ContainerStarted","Data":"412cb46d5dd73eb29435a0e892699cd0caca14a536dd27aad8ed8e669f2f6e22"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.467550 4721 kubelet.go:2453] "SyncLoop (PLEG): 
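
Two things are worth noting in the block above. The W-level manager.go entry appears to be cAdvisor losing a race with container creation: its cgroup watch fired before the new crio container was registered, hence the 404; such warnings are typically transient. The "SyncLoop (PLEG)" entries are pod lifecycle events relayed to the sync loop as {ID, Type, Data} tuples, where Data carries a container or sandbox ID. A minimal decoder for tuples in the logged form (the struct mirrors the log output, not kubelet's internal event type):

// Decodes a PLEG event as it appears in these log lines, e.g.
// event={"ID":"...","Type":"ContainerStarted","Data":"..."}.
// Illustrative only; field names match the log, not kubelet internals.
package main

import (
	"encoding/json"
	"fmt"
)

type plegEvent struct {
	ID   string // pod UID
	Type string // e.g. ContainerStarted, ContainerDied
	Data string // container or sandbox ID
}

func main() {
	raw := `{"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"ae490313674da84f13d4692726a174fbee32df0e2962978415f2dde594e12e73"}`
	var ev plegEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	switch ev.Type {
	case "ContainerStarted":
		fmt.Println("pod", ev.ID, "started container/sandbox", ev.Data)
	case "ContainerDied":
		fmt.Println("pod", ev.ID, "container exited:", ev.Data)
	}
}
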
event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" event={"ID":"64ed6731-ff9c-4a61-b696-00dcac24cb8d","Type":"ContainerStarted","Data":"f2c8c88b765f6c9219f3e070a395415b2a67ac414edd1584747a2abb4e81c42c"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.470158 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/2.log" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.477674 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" event={"ID":"6007c115-e448-4886-9aa2-14a72217c0bd","Type":"ContainerStarted","Data":"a69c39aca1e9ae8a881c0cb7472971ea148400ee26fb6ba9c6ebf100b561f13e"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.478187 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.480536 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"5f50bfc282be735956b8cdb3089982fbe1c03633b8bc9f9ed52f5423b4a790c1"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.480589 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"ae490313674da84f13d4692726a174fbee32df0e2962978415f2dde594e12e73"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.503866 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-98r68" podStartSLOduration=2.316145582 podStartE2EDuration="15.503832545s" podCreationTimestamp="2026-01-30 21:28:30 +0000 UTC" firstStartedPulling="2026-01-30 21:28:31.577853728 +0000 UTC m=+700.369754974" lastFinishedPulling="2026-01-30 21:28:44.765540691 +0000 UTC m=+713.557441937" observedRunningTime="2026-01-30 21:28:45.474669683 +0000 UTC m=+714.266570949" watchObservedRunningTime="2026-01-30 21:28:45.503832545 +0000 UTC m=+714.295733801" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.516075 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovn-acl-logging/0.log" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.516910 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2p5n5_f295c622-6366-498b-b846-24316b3ad5b7/ovn-controller/0.log" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.523144 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.523371 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" event={"ID":"f295c622-6366-498b-b846-24316b3ad5b7","Type":"ContainerDied","Data":"ea98a05c679ef185e8c1e8324f7981eff20cc54258aa6fc5116a944a68d12632"} Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.523408 4721 scope.go:117] "RemoveContainer" containerID="2d6a1248435f7b74f1093fccf453e5e79a2b66fb79882ee43d359ff897c280e2" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.523618 4721 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2p5n5" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.552756 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" podStartSLOduration=1.705105458 podStartE2EDuration="14.552731295s" podCreationTimestamp="2026-01-30 21:28:31 +0000 UTC" firstStartedPulling="2026-01-30 21:28:31.83782787 +0000 UTC m=+700.629729116" lastFinishedPulling="2026-01-30 21:28:44.685453707 +0000 UTC m=+713.477354953" observedRunningTime="2026-01-30 21:28:45.544871423 +0000 UTC m=+714.336772679" watchObservedRunningTime="2026-01-30 21:28:45.552731295 +0000 UTC m=+714.344632541" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.586673 4721 scope.go:117] "RemoveContainer" containerID="08c882fb2bc15ed501af9017d175b4d2272acacdedb06f895ce637bc69fe087a" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.651027 4721 scope.go:117] "RemoveContainer" containerID="24c9687c32222b265e45f0d914b2e7fe533a22ad9e467b0c2adde4923a87bbbf" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.652874 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79f76f558b-25fb7" podStartSLOduration=2.581724437 podStartE2EDuration="15.652831849s" podCreationTimestamp="2026-01-30 21:28:30 +0000 UTC" firstStartedPulling="2026-01-30 21:28:31.837892722 +0000 UTC m=+700.629793968" lastFinishedPulling="2026-01-30 21:28:44.909000134 +0000 UTC m=+713.700901380" observedRunningTime="2026-01-30 21:28:45.651425195 +0000 UTC m=+714.443326441" watchObservedRunningTime="2026-01-30 21:28:45.652831849 +0000 UTC m=+714.444733115" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.655104 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-lxm2h" podStartSLOduration=2.826109229 podStartE2EDuration="15.655100279s" podCreationTimestamp="2026-01-30 21:28:30 +0000 UTC" firstStartedPulling="2026-01-30 21:28:31.856365524 +0000 UTC m=+700.648266770" lastFinishedPulling="2026-01-30 21:28:44.685356584 +0000 UTC m=+713.477257820" observedRunningTime="2026-01-30 21:28:45.60725183 +0000 UTC m=+714.399153076" watchObservedRunningTime="2026-01-30 21:28:45.655100279 +0000 UTC m=+714.447001525" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.677152 4721 scope.go:117] "RemoveContainer" containerID="6b15b07fef3b24c1e6f7646c812ede90f0e903ee6d49900aaa687453e3f17bc7" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.691889 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-rw7xg" podStartSLOduration=2.662558143 podStartE2EDuration="15.691873125s" podCreationTimestamp="2026-01-30 21:28:30 +0000 UTC" firstStartedPulling="2026-01-30 21:28:31.740652977 +0000 UTC m=+700.532554223" lastFinishedPulling="2026-01-30 21:28:44.769967959 +0000 UTC m=+713.561869205" observedRunningTime="2026-01-30 21:28:45.682305309 +0000 UTC m=+714.474206555" watchObservedRunningTime="2026-01-30 21:28:45.691873125 +0000 UTC m=+714.483774371" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.700510 4721 scope.go:117] "RemoveContainer" containerID="2b1c9c3b43597c3e00f58448c04d2ce525f0d0a2e9745491bb11a7f13d394d6c" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.722636 4721 scope.go:117] "RemoveContainer" 
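
The pod_startup_latency_tracker entries above carry enough timestamps to recompute their own figures: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that value minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Checking the arithmetic for obo-prometheus-operator-admission-webhook-79f76f558b-98r68 with the values logged above:

// Recomputes the startup durations reported for the 98r68 pod from the
// timestamps in its tracker line. Assumes the SLO figure excludes the
// image-pull window, which the logged numbers bear out.
package main

import (
	"fmt"
	"time"
)

func must(t time.Time, err error) time.Time {
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created := must(time.Parse(layout, "2026-01-30 21:28:30 +0000 UTC"))
	firstPull := must(time.Parse(layout, "2026-01-30 21:28:31.577853728 +0000 UTC"))
	lastPull := must(time.Parse(layout, "2026-01-30 21:28:44.765540691 +0000 UTC"))
	running := must(time.Parse(layout, "2026-01-30 21:28:45.503832545 +0000 UTC"))

	e2e := running.Sub(created)          // 15.503832545s == podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 2.316145582s  == podStartSLOduration
	fmt.Println(e2e, slo)
}

The 15.503832545s end-to-end figure minus the 13.187686963s pull window gives exactly the reported 2.316145582.
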
containerID="17356c4b5e8937a245575c1ea75ec0a2b85b1f9b51c42cb7b53c880777f64323" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.755611 4721 scope.go:117] "RemoveContainer" containerID="dfeb6cb5eea9bd3a50767842fa6dbf4e5e34258115ef2eddce2efb33cdc3171d" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.799629 4721 scope.go:117] "RemoveContainer" containerID="a9b7d0f1df0b0b5eddf138ff8f6d046fb091bf68a0c0b897904c18a31ca4c3b5" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.834181 4721 scope.go:117] "RemoveContainer" containerID="95ea5fa49fd739e8dc6fd78a9bee4e756f34cd2a1c1b20129fc8d0bcd1b6d44a" Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.840060 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2p5n5"] Jan 30 21:28:45 crc kubenswrapper[4721]: I0130 21:28:45.845267 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2p5n5"] Jan 30 21:28:46 crc kubenswrapper[4721]: I0130 21:28:46.100342 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f295c622-6366-498b-b846-24316b3ad5b7" path="/var/lib/kubelet/pods/f295c622-6366-498b-b846-24316b3ad5b7/volumes" Jan 30 21:28:46 crc kubenswrapper[4721]: I0130 21:28:46.533323 4721 generic.go:334] "Generic (PLEG): container finished" podID="c8e76699-472b-4945-b1b3-47f55c922ec9" containerID="5f50bfc282be735956b8cdb3089982fbe1c03633b8bc9f9ed52f5423b4a790c1" exitCode=0 Jan 30 21:28:46 crc kubenswrapper[4721]: I0130 21:28:46.533427 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerDied","Data":"5f50bfc282be735956b8cdb3089982fbe1c03633b8bc9f9ed52f5423b4a790c1"} Jan 30 21:28:46 crc kubenswrapper[4721]: I0130 21:28:46.533468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"399b18dbe7b23d1b1bb611b0ec0d6e297bac9d2845be494f57c5c959b19a1127"} Jan 30 21:28:46 crc kubenswrapper[4721]: I0130 21:28:46.533479 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"aab9f7ebcb6289962a159b11eb6e1973a23fd2c05e1fc187326901fa6969cb12"} Jan 30 21:28:47 crc kubenswrapper[4721]: I0130 21:28:47.559804 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"34298ffc3d632de114841666665f042b897ee918f4b6be75251b1f13d48555c0"} Jan 30 21:28:47 crc kubenswrapper[4721]: I0130 21:28:47.559888 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"a26b7a9845683177e34ac273f9b659ffeefc992d04f2ae54b9e4386ad04d2953"} Jan 30 21:28:47 crc kubenswrapper[4721]: I0130 21:28:47.559904 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"5c8c9e0788577db0ec5e0b3b165eff9c2a20c42fc6ff898e370fa31c05cef559"} Jan 30 21:28:47 crc kubenswrapper[4721]: I0130 21:28:47.559913 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" 
event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"0b393dd85a3c36fba2d75f54ad0f26caf40bd8bcfee1844afd175cd812f126ed"} Jan 30 21:28:49 crc kubenswrapper[4721]: I0130 21:28:49.580943 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"ab03af0fadca4c9e84de0f38d0705bf7d90988265e580d1ffb4315f6734d9e08"} Jan 30 21:28:51 crc kubenswrapper[4721]: I0130 21:28:51.536509 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-hnrqb" Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.616632 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" event={"ID":"c8e76699-472b-4945-b1b3-47f55c922ec9","Type":"ContainerStarted","Data":"8da5948daf39ff292325d9ce3f2db16e1e8df03d060a1cc49ad5485c0ec70050"} Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.617154 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.617227 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.617317 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.666886 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.671229 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" podStartSLOduration=8.671212174 podStartE2EDuration="8.671212174s" podCreationTimestamp="2026-01-30 21:28:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:28:52.66815836 +0000 UTC m=+721.460059606" watchObservedRunningTime="2026-01-30 21:28:52.671212174 +0000 UTC m=+721.463113420" Jan 30 21:28:52 crc kubenswrapper[4721]: I0130 21:28:52.747530 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86" Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.950125 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"] Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.951491 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.953323 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.953923 4721 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-zwb69"
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.954322 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.960546 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-dwdzb"]
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.961720 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.964314 4721 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-2sbj6"
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.977575 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-7wqvq"]
Jan 30 21:28:53 crc kubenswrapper[4721]: I0130 21:28:53.979792 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.004498 4721 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-wsl2r"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.024726 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-dwdzb"]
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.048363 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"]
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.089730 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-7wqvq"]
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.120479 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99bl8\" (UniqueName: \"kubernetes.io/projected/02aa71d8-1558-4083-b360-d40f9bd180fb-kube-api-access-99bl8\") pod \"cert-manager-cainjector-cf98fcc89-kv2jr\" (UID: \"02aa71d8-1558-4083-b360-d40f9bd180fb\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.120865 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcfj5\" (UniqueName: \"kubernetes.io/projected/d4f08327-9c54-4b81-a397-77de365f3c7d-kube-api-access-fcfj5\") pod \"cert-manager-858654f9db-dwdzb\" (UID: \"d4f08327-9c54-4b81-a397-77de365f3c7d\") " pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.120960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxhck\" (UniqueName: \"kubernetes.io/projected/7a28e689-3208-4314-a5d9-c06c110c2482-kube-api-access-dxhck\") pod \"cert-manager-webhook-687f57d79b-7wqvq\" (UID: \"7a28e689-3208-4314-a5d9-c06c110c2482\") " pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.222799 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99bl8\" (UniqueName: \"kubernetes.io/projected/02aa71d8-1558-4083-b360-d40f9bd180fb-kube-api-access-99bl8\") pod \"cert-manager-cainjector-cf98fcc89-kv2jr\" (UID: \"02aa71d8-1558-4083-b360-d40f9bd180fb\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.222910 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcfj5\" (UniqueName: \"kubernetes.io/projected/d4f08327-9c54-4b81-a397-77de365f3c7d-kube-api-access-fcfj5\") pod \"cert-manager-858654f9db-dwdzb\" (UID: \"d4f08327-9c54-4b81-a397-77de365f3c7d\") " pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.222956 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxhck\" (UniqueName: \"kubernetes.io/projected/7a28e689-3208-4314-a5d9-c06c110c2482-kube-api-access-dxhck\") pod \"cert-manager-webhook-687f57d79b-7wqvq\" (UID: \"7a28e689-3208-4314-a5d9-c06c110c2482\") " pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.271693 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcfj5\" (UniqueName: \"kubernetes.io/projected/d4f08327-9c54-4b81-a397-77de365f3c7d-kube-api-access-fcfj5\") pod \"cert-manager-858654f9db-dwdzb\" (UID: \"d4f08327-9c54-4b81-a397-77de365f3c7d\") " pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.273048 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxhck\" (UniqueName: \"kubernetes.io/projected/7a28e689-3208-4314-a5d9-c06c110c2482-kube-api-access-dxhck\") pod \"cert-manager-webhook-687f57d79b-7wqvq\" (UID: \"7a28e689-3208-4314-a5d9-c06c110c2482\") " pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.283230 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99bl8\" (UniqueName: \"kubernetes.io/projected/02aa71d8-1558-4083-b360-d40f9bd180fb-kube-api-access-99bl8\") pod \"cert-manager-cainjector-cf98fcc89-kv2jr\" (UID: \"02aa71d8-1558-4083-b360-d40f9bd180fb\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.297600 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.308176 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.345592 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(21a5a6284fbbb8542a3fee02807357b19649a97fc70d0f6ca00e99f1e86bfbb5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
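
The RunPodSandbox failure above and the ones that follow all carry the same root cause: the runtime found no CNI configuration under /etc/kubernetes/cni/net.d/, presumably because the just-restarted OVN-Kubernetes node pod had not yet rewritten its config there. The check is easy to approximate; the scan below for .conf/.conflist/.json files is my own sketch of it, not CRI-O's actual code:

// Approximates the readiness check behind "no CNI configuration file
// in /etc/kubernetes/cni/net.d/": look for any .conf, .conflist or
// .json file in the conf dir. Illustrative, not CRI-O's implementation.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d/"
	entries, err := os.ReadDir(confDir)
	if err != nil && !os.IsNotExist(err) {
		panic(err)
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("found CNI config:", filepath.Join(confDir, e.Name()))
			return
		}
	}
	fmt.Printf("no CNI configuration file in %s. Has your network provider started?\n", confDir)
}
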
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.345693 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(21a5a6284fbbb8542a3fee02807357b19649a97fc70d0f6ca00e99f1e86bfbb5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.345725 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(21a5a6284fbbb8542a3fee02807357b19649a97fc70d0f6ca00e99f1e86bfbb5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.345786 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-858654f9db-dwdzb_cert-manager(d4f08327-9c54-4b81-a397-77de365f3c7d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-858654f9db-dwdzb_cert-manager(d4f08327-9c54-4b81-a397-77de365f3c7d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(21a5a6284fbbb8542a3fee02807357b19649a97fc70d0f6ca00e99f1e86bfbb5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-858654f9db-dwdzb" podUID="d4f08327-9c54-4b81-a397-77de365f3c7d"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.369581 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(aec2f1fd12257dfc60c4069809e0eaf0668aed6ba74ed8fcc262bea1e346072a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.369672 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(aec2f1fd12257dfc60c4069809e0eaf0668aed6ba74ed8fcc262bea1e346072a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.369702 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(aec2f1fd12257dfc60c4069809e0eaf0668aed6ba74ed8fcc262bea1e346072a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.369753 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-webhook-687f57d79b-7wqvq_cert-manager(7a28e689-3208-4314-a5d9-c06c110c2482)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-webhook-687f57d79b-7wqvq_cert-manager(7a28e689-3208-4314-a5d9-c06c110c2482)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(aec2f1fd12257dfc60c4069809e0eaf0668aed6ba74ed8fcc262bea1e346072a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq" podUID="7a28e689-3208-4314-a5d9-c06c110c2482"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.579818 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.607351 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(4a395188f35069919108e2c2d5bc61a841c80cf66366d9e53b9f310c572caba7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.607484 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(4a395188f35069919108e2c2d5bc61a841c80cf66366d9e53b9f310c572caba7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.607512 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(4a395188f35069919108e2c2d5bc61a841c80cf66366d9e53b9f310c572caba7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.607580 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager(02aa71d8-1558-4083-b360-d40f9bd180fb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager(02aa71d8-1558-4083-b360-d40f9bd180fb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(4a395188f35069919108e2c2d5bc61a841c80cf66366d9e53b9f310c572caba7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr" podUID="02aa71d8-1558-4083-b360-d40f9bd180fb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.638088 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.638315 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.638500 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.638678 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.638816 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: I0130 21:28:54.639014 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.698793 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(881e564a7a14d5739d09ba21e1d5bf873fc8543df727d5f9f97d5e717e9dbe8e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.699438 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(881e564a7a14d5739d09ba21e1d5bf873fc8543df727d5f9f97d5e717e9dbe8e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.699471 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(881e564a7a14d5739d09ba21e1d5bf873fc8543df727d5f9f97d5e717e9dbe8e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.699538 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-858654f9db-dwdzb_cert-manager(d4f08327-9c54-4b81-a397-77de365f3c7d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-858654f9db-dwdzb_cert-manager(d4f08327-9c54-4b81-a397-77de365f3c7d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(881e564a7a14d5739d09ba21e1d5bf873fc8543df727d5f9f97d5e717e9dbe8e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-858654f9db-dwdzb" podUID="d4f08327-9c54-4b81-a397-77de365f3c7d"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.719592 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(ad49b92f4a3bd3bb3c7094f1d5698091ee56ad88052156942a20ca36a40a7b57): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.719688 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(ad49b92f4a3bd3bb3c7094f1d5698091ee56ad88052156942a20ca36a40a7b57): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.719718 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(ad49b92f4a3bd3bb3c7094f1d5698091ee56ad88052156942a20ca36a40a7b57): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.719776 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-webhook-687f57d79b-7wqvq_cert-manager(7a28e689-3208-4314-a5d9-c06c110c2482)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-webhook-687f57d79b-7wqvq_cert-manager(7a28e689-3208-4314-a5d9-c06c110c2482)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(ad49b92f4a3bd3bb3c7094f1d5698091ee56ad88052156942a20ca36a40a7b57): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq" podUID="7a28e689-3208-4314-a5d9-c06c110c2482"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.732157 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(13b5e721c27066bc191cb1b1f36cabc54f9c6ae9bffca3f80f37c2829a1de8be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.732251 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(13b5e721c27066bc191cb1b1f36cabc54f9c6ae9bffca3f80f37c2829a1de8be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.732279 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(13b5e721c27066bc191cb1b1f36cabc54f9c6ae9bffca3f80f37c2829a1de8be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:28:54 crc kubenswrapper[4721]: E0130 21:28:54.732366 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager(02aa71d8-1558-4083-b360-d40f9bd180fb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager(02aa71d8-1558-4083-b360-d40f9bd180fb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(13b5e721c27066bc191cb1b1f36cabc54f9c6ae9bffca3f80f37c2829a1de8be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr" podUID="02aa71d8-1558-4083-b360-d40f9bd180fb"
Jan 30 21:28:55 crc kubenswrapper[4721]: I0130 21:28:55.092168 4721 scope.go:117] "RemoveContainer" containerID="b7bb92494e4fef088d7d6741d3a0314fed401e904d1675f21988157c35a6a12c"
Jan 30 21:28:55 crc kubenswrapper[4721]: E0130 21:28:55.092433 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-g7fgc_openshift-multus(62d4c2ec-791a-4f32-8ba0-118cac4e72e5)\"" pod="openshift-multus/multus-g7fgc" podUID="62d4c2ec-791a-4f32-8ba0-118cac4e72e5"
Jan 30 21:28:59 crc kubenswrapper[4721]: I0130 21:28:59.448703 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 21:28:59 crc kubenswrapper[4721]: I0130 21:28:59.449265 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 21:29:07 crc kubenswrapper[4721]: I0130 21:29:07.092278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:29:07 crc kubenswrapper[4721]: I0130 21:29:07.093610 4721 scope.go:117] "RemoveContainer" containerID="b7bb92494e4fef088d7d6741d3a0314fed401e904d1675f21988157c35a6a12c"
Jan 30 21:29:07 crc kubenswrapper[4721]: I0130 21:29:07.092526 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:07 crc kubenswrapper[4721]: I0130 21:29:07.094135 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:29:07 crc kubenswrapper[4721]: I0130 21:29:07.094421 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.165841 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(96c43507155f05143d6adea7c3ace644cf7d1bf8abde53146f5765a65bb27791): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.165968 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(96c43507155f05143d6adea7c3ace644cf7d1bf8abde53146f5765a65bb27791): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.166005 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(96c43507155f05143d6adea7c3ace644cf7d1bf8abde53146f5765a65bb27791): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.166137 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-webhook-687f57d79b-7wqvq_cert-manager(7a28e689-3208-4314-a5d9-c06c110c2482)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-webhook-687f57d79b-7wqvq_cert-manager(7a28e689-3208-4314-a5d9-c06c110c2482)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-webhook-687f57d79b-7wqvq_cert-manager_7a28e689-3208-4314-a5d9-c06c110c2482_0(96c43507155f05143d6adea7c3ace644cf7d1bf8abde53146f5765a65bb27791): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq" podUID="7a28e689-3208-4314-a5d9-c06c110c2482"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.184047 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(19013d0eba91f066291837ae80001e285d92627c679163faf09d1e0f8cbcd28d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.184155 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(19013d0eba91f066291837ae80001e285d92627c679163faf09d1e0f8cbcd28d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.184216 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(19013d0eba91f066291837ae80001e285d92627c679163faf09d1e0f8cbcd28d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:29:07 crc kubenswrapper[4721]: E0130 21:29:07.184321 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-858654f9db-dwdzb_cert-manager(d4f08327-9c54-4b81-a397-77de365f3c7d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-858654f9db-dwdzb_cert-manager(d4f08327-9c54-4b81-a397-77de365f3c7d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-858654f9db-dwdzb_cert-manager_d4f08327-9c54-4b81-a397-77de365f3c7d_0(19013d0eba91f066291837ae80001e285d92627c679163faf09d1e0f8cbcd28d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-858654f9db-dwdzb" podUID="d4f08327-9c54-4b81-a397-77de365f3c7d"
Jan 30 21:29:08 crc kubenswrapper[4721]: I0130 21:29:08.740455 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g7fgc_62d4c2ec-791a-4f32-8ba0-118cac4e72e5/kube-multus/2.log"
Jan 30 21:29:08 crc kubenswrapper[4721]: I0130 21:29:08.740882 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g7fgc" event={"ID":"62d4c2ec-791a-4f32-8ba0-118cac4e72e5","Type":"ContainerStarted","Data":"fef159d54766453e2b25f58b87810ad112665b9a6eb613ad6e4a0a5c3a741076"}
Jan 30 21:29:09 crc kubenswrapper[4721]: I0130 21:29:09.092294 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:29:09 crc kubenswrapper[4721]: I0130 21:29:09.092932 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:29:09 crc kubenswrapper[4721]: E0130 21:29:09.147275 4721 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(84297d9b79ccffc95aecacb086e390bfaf61873fd2dfb316e45e45617113ee5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 30 21:29:09 crc kubenswrapper[4721]: E0130 21:29:09.147379 4721 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(84297d9b79ccffc95aecacb086e390bfaf61873fd2dfb316e45e45617113ee5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:29:09 crc kubenswrapper[4721]: E0130 21:29:09.147412 4721 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(84297d9b79ccffc95aecacb086e390bfaf61873fd2dfb316e45e45617113ee5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:29:09 crc kubenswrapper[4721]: E0130 21:29:09.147518 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager(02aa71d8-1558-4083-b360-d40f9bd180fb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager(02aa71d8-1558-4083-b360-d40f9bd180fb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_cert-manager-cainjector-cf98fcc89-kv2jr_cert-manager_02aa71d8-1558-4083-b360-d40f9bd180fb_0(84297d9b79ccffc95aecacb086e390bfaf61873fd2dfb316e45e45617113ee5c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr" podUID="02aa71d8-1558-4083-b360-d40f9bd180fb"
Jan 30 21:29:15 crc kubenswrapper[4721]: I0130 21:29:15.331542 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7wk86"
Jan 30 21:29:18 crc kubenswrapper[4721]: I0130 21:29:18.101358 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:29:18 crc kubenswrapper[4721]: I0130 21:29:18.103022 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-dwdzb"
Jan 30 21:29:18 crc kubenswrapper[4721]: I0130 21:29:18.397864 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-dwdzb"]
Jan 30 21:29:18 crc kubenswrapper[4721]: I0130 21:29:18.840627 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-dwdzb" event={"ID":"d4f08327-9c54-4b81-a397-77de365f3c7d","Type":"ContainerStarted","Data":"3c57f99109fd1e25ed68861801c8d86a2b672bd49cecf172401b993aa102c4c9"}
Jan 30 21:29:19 crc kubenswrapper[4721]: I0130 21:29:19.091823 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:19 crc kubenswrapper[4721]: I0130 21:29:19.092511 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:19 crc kubenswrapper[4721]: I0130 21:29:19.600790 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-7wqvq"]
Jan 30 21:29:19 crc kubenswrapper[4721]: W0130 21:29:19.757046 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a28e689_3208_4314_a5d9_c06c110c2482.slice/crio-d40897e4edf288ce7fa7853dd011d944604cf7ae3728a772293050961fe63575 WatchSource:0}: Error finding container d40897e4edf288ce7fa7853dd011d944604cf7ae3728a772293050961fe63575: Status 404 returned error can't find the container with id d40897e4edf288ce7fa7853dd011d944604cf7ae3728a772293050961fe63575
Jan 30 21:29:19 crc kubenswrapper[4721]: I0130 21:29:19.851794 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq" event={"ID":"7a28e689-3208-4314-a5d9-c06c110c2482","Type":"ContainerStarted","Data":"d40897e4edf288ce7fa7853dd011d944604cf7ae3728a772293050961fe63575"}
Jan 30 21:29:21 crc kubenswrapper[4721]: I0130 21:29:21.868169 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-dwdzb" event={"ID":"d4f08327-9c54-4b81-a397-77de365f3c7d","Type":"ContainerStarted","Data":"c5ce39f4ea0838d4840f529577fc6714a9b86dc3fb1fb1320878b6d29a2ac8dc"}
Jan 30 21:29:21 crc kubenswrapper[4721]: I0130 21:29:21.897007 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-dwdzb" podStartSLOduration=25.985064423 podStartE2EDuration="28.896980591s" podCreationTimestamp="2026-01-30 21:28:53 +0000 UTC" firstStartedPulling="2026-01-30 21:29:18.411438128 +0000 UTC m=+747.203339424" lastFinishedPulling="2026-01-30 21:29:21.323354336 +0000 UTC m=+750.115255592" observedRunningTime="2026-01-30 21:29:21.895154785 +0000 UTC m=+750.687056041" watchObservedRunningTime="2026-01-30 21:29:21.896980591 +0000 UTC m=+750.688881837"
Jan 30 21:29:22 crc kubenswrapper[4721]: I0130 21:29:22.878708 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq" event={"ID":"7a28e689-3208-4314-a5d9-c06c110c2482","Type":"ContainerStarted","Data":"53270c7ab9b1b877ab2c824dd56a2b80e7c2f39de9939f63d6f6848c86aeeb81"}
Jan 30 21:29:22 crc kubenswrapper[4721]: I0130 21:29:22.924489 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq" podStartSLOduration=27.109278521 podStartE2EDuration="29.92444805s" podCreationTimestamp="2026-01-30 21:28:53 +0000 UTC" firstStartedPulling="2026-01-30 21:29:19.760970869 +0000 UTC m=+748.552872115" lastFinishedPulling="2026-01-30 21:29:22.576140378 +0000 UTC m=+751.368041644" observedRunningTime="2026-01-30 21:29:22.916126033 +0000 UTC m=+751.708027309" watchObservedRunningTime="2026-01-30 21:29:22.92444805 +0000 UTC m=+751.716349346"
Jan 30 21:29:23 crc kubenswrapper[4721]: I0130 21:29:23.091849 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:29:23 crc kubenswrapper[4721]: I0130 21:29:23.093205 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"
Jan 30 21:29:23 crc kubenswrapper[4721]: I0130 21:29:23.390563 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr"]
Jan 30 21:29:23 crc kubenswrapper[4721]: W0130 21:29:23.403597 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02aa71d8_1558_4083_b360_d40f9bd180fb.slice/crio-98e6d01b4d062a80cee5542ddb2e6f2f9ad7a2a6f2ecbc6584a86abdb6bfa00a WatchSource:0}: Error finding container 98e6d01b4d062a80cee5542ddb2e6f2f9ad7a2a6f2ecbc6584a86abdb6bfa00a: Status 404 returned error can't find the container with id 98e6d01b4d062a80cee5542ddb2e6f2f9ad7a2a6f2ecbc6584a86abdb6bfa00a
Jan 30 21:29:23 crc kubenswrapper[4721]: I0130 21:29:23.890564 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr" event={"ID":"02aa71d8-1558-4083-b360-d40f9bd180fb","Type":"ContainerStarted","Data":"98e6d01b4d062a80cee5542ddb2e6f2f9ad7a2a6f2ecbc6584a86abdb6bfa00a"}
Jan 30 21:29:23 crc kubenswrapper[4721]: I0130 21:29:23.890792 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:25 crc kubenswrapper[4721]: I0130 21:29:25.908759 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr" event={"ID":"02aa71d8-1558-4083-b360-d40f9bd180fb","Type":"ContainerStarted","Data":"ecfd6801f4bd16b65b8a497a52e3cd36ffaeff6e9b1cea682f9204a4ddfa49ed"}
Jan 30 21:29:25 crc kubenswrapper[4721]: I0130 21:29:25.933109 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-kv2jr" podStartSLOduration=31.370992047 podStartE2EDuration="32.933052185s" podCreationTimestamp="2026-01-30 21:28:53 +0000 UTC" firstStartedPulling="2026-01-30 21:29:23.407467716 +0000 UTC m=+752.199369002" lastFinishedPulling="2026-01-30 21:29:24.969527854 +0000 UTC m=+753.761429140" observedRunningTime="2026-01-30 21:29:25.925884794 +0000 UTC m=+754.717786050" watchObservedRunningTime="2026-01-30 21:29:25.933052185 +0000 UTC m=+754.724953441"
Jan 30 21:29:28 crc kubenswrapper[4721]: I0130 21:29:28.117800 4721 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 30 21:29:29 crc kubenswrapper[4721]: I0130 21:29:29.312814 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-7wqvq"
Jan 30 21:29:29 crc kubenswrapper[4721]: I0130 21:29:29.448705 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 21:29:29 crc kubenswrapper[4721]: I0130 21:29:29.448789 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.148501 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"]
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.150832 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.154422 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.157860 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"]
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.191846 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-util\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.192026 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-bundle\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.192392 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v2jz\" (UniqueName: \"kubernetes.io/projected/2790d917-3de8-4c45-8848-e1df3854c716-kube-api-access-8v2jz\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.293711 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v2jz\" (UniqueName: \"kubernetes.io/projected/2790d917-3de8-4c45-8848-e1df3854c716-kube-api-access-8v2jz\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.293894 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-util\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.293948 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-bundle\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.294717 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-util\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.294928 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-bundle\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.329616 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v2jz\" (UniqueName: \"kubernetes.io/projected/2790d917-3de8-4c45-8848-e1df3854c716-kube-api-access-8v2jz\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.481546 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:29:58 crc kubenswrapper[4721]: I0130 21:29:58.829950 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"]
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.207217 4721 generic.go:334] "Generic (PLEG): container finished" podID="2790d917-3de8-4c45-8848-e1df3854c716" containerID="9a89e8a4585742dcd3cc1085b7f3d091d5492b07e06d78baccef6c4c0d020710" exitCode=0
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.207404 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8" event={"ID":"2790d917-3de8-4c45-8848-e1df3854c716","Type":"ContainerDied","Data":"9a89e8a4585742dcd3cc1085b7f3d091d5492b07e06d78baccef6c4c0d020710"}
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.208728 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8" event={"ID":"2790d917-3de8-4c45-8848-e1df3854c716","Type":"ContainerStarted","Data":"d834080d7754fdef709a22aa5442145b2edd0c2077cff8a3a3701629b58be7d4"}
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.448627 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.448731 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.448839 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.450008 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"02acc2ce27e27177088d6c3748fc2d939b3d52222280b1e9d41a45b1ef083f4a"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 21:29:59 crc kubenswrapper[4721]: I0130 21:29:59.450096 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://02acc2ce27e27177088d6c3748fc2d939b3d52222280b1e9d41a45b1ef083f4a" gracePeriod=600
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.155430 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.157135 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.160271 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.160488 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.170845 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.219708 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="02acc2ce27e27177088d6c3748fc2d939b3d52222280b1e9d41a45b1ef083f4a" exitCode=0
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.219777 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"02acc2ce27e27177088d6c3748fc2d939b3d52222280b1e9d41a45b1ef083f4a"}
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.219825 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"7d6a021bc68a61b4be67915db3a0b2b9c9493a1fa6b81ad7a502063e8657996f"}
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.219855 4721 scope.go:117] "RemoveContainer" containerID="d7ee160d63cdd492388107118b59475b29b882ae5e61afaaca4166b1fbfadf4b"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.325112 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-secret-volume\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.325727 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvks5\" (UniqueName: \"kubernetes.io/projected/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-kube-api-access-fvks5\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.325946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-config-volume\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.427324 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-config-volume\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.427417 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-secret-volume\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.427488 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvks5\" (UniqueName: \"kubernetes.io/projected/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-kube-api-access-fvks5\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.429564 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-config-volume\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.437762 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-secret-volume\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.469040 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvks5\" (UniqueName: \"kubernetes.io/projected/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-kube-api-access-fvks5\") pod \"collect-profiles-29496810-vqlxb\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.473270 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.476212 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.477719 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.484128 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.484651 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.484913 4721 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-r5xwp"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.494553 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p7rh7"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.496658 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.503422 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.521156 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p7rh7"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.635840 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") " pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.636002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-utilities\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.636050 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rtm7\" (UniqueName: \"kubernetes.io/projected/1b240294-795d-4733-bd00-b823764bb190-kube-api-access-6rtm7\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") " pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.636081 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-catalog-content\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.636114 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6294b\" (UniqueName: \"kubernetes.io/projected/118e3137-6a70-4388-9f38-647a138813a7-kube-api-access-6294b\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.738273 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") " pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.738365 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-utilities\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.738425 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rtm7\" (UniqueName: \"kubernetes.io/projected/1b240294-795d-4733-bd00-b823764bb190-kube-api-access-6rtm7\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") " pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.738467 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-catalog-content\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.738504 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6294b\" (UniqueName: \"kubernetes.io/projected/118e3137-6a70-4388-9f38-647a138813a7-kube-api-access-6294b\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.739260 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-catalog-content\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.739281 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-utilities\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.743779 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.743829 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a3c5437e5e660ee640b84f6f3a89a29f120705ecd50941d8e251a2a43af5e1ca/globalmount\"" pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.758361 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"]
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.761881 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rtm7\" (UniqueName: \"kubernetes.io/projected/1b240294-795d-4733-bd00-b823764bb190-kube-api-access-6rtm7\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") " pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.779350 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6294b\" (UniqueName: \"kubernetes.io/projected/118e3137-6a70-4388-9f38-647a138813a7-kube-api-access-6294b\") pod \"redhat-operators-p7rh7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") " pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.802946 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf234642-4f77-4fa5-a2d7-717d1ed76ebd\") pod \"minio\" (UID: \"1b240294-795d-4733-bd00-b823764bb190\") " pod="minio-dev/minio"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.881366 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:00 crc kubenswrapper[4721]: I0130 21:30:00.881595 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.213255 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p7rh7"]
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.248822 4721 generic.go:334] "Generic (PLEG): container finished" podID="2790d917-3de8-4c45-8848-e1df3854c716" containerID="f1ed3b731a60817881849868bab52d1ec671f9109bad19d1256749eb9c0fad28" exitCode=0
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.248890 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8" event={"ID":"2790d917-3de8-4c45-8848-e1df3854c716","Type":"ContainerDied","Data":"f1ed3b731a60817881849868bab52d1ec671f9109bad19d1256749eb9c0fad28"}
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.272941 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerStarted","Data":"7d4e1e1bbcbf06ce3f02e869806f211ce785754a3086d06f77b99cf77ce76e3e"}
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.283579 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb" event={"ID":"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca","Type":"ContainerStarted","Data":"d0e9ca4a2e13396d3ab7df216f4a804f733562f20fa8af8e000bcaccaacf4f9a"}
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.283707 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb" event={"ID":"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca","Type":"ContainerStarted","Data":"098a62d87fadadfc4f686e69cf383ae9e9feebdc95b4caf00d090d84e016fa58"}
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.306008 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb" podStartSLOduration=1.305985097 podStartE2EDuration="1.305985097s" podCreationTimestamp="2026-01-30 21:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:30:01.30002247 +0000 UTC m=+790.091923716" watchObservedRunningTime="2026-01-30 21:30:01.305985097 +0000 UTC m=+790.097886343"
Jan 30 21:30:01 crc kubenswrapper[4721]: I0130 21:30:01.335006 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.305678 4721 generic.go:334] "Generic (PLEG): container finished" podID="2790d917-3de8-4c45-8848-e1df3854c716" containerID="504f8e46a4f1f8af3783291345c70c28c9ee205ee445afa0c855c992e843d15e" exitCode=0
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.305754 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8" event={"ID":"2790d917-3de8-4c45-8848-e1df3854c716","Type":"ContainerDied","Data":"504f8e46a4f1f8af3783291345c70c28c9ee205ee445afa0c855c992e843d15e"}
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.308485 4721 generic.go:334] "Generic (PLEG): container finished" podID="118e3137-6a70-4388-9f38-647a138813a7" containerID="b213ddc81843540ae9b051c64fe853c56dafc3c716c9a47319ad46905f908611" exitCode=0
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.308554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerDied","Data":"b213ddc81843540ae9b051c64fe853c56dafc3c716c9a47319ad46905f908611"}
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.315129 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"1b240294-795d-4733-bd00-b823764bb190","Type":"ContainerStarted","Data":"5407ba0f59af592f21439c97750f1e07833076462c334b65a30a89ce88afaf1f"}
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.324172 4721 generic.go:334] "Generic (PLEG): container finished" podID="f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" containerID="d0e9ca4a2e13396d3ab7df216f4a804f733562f20fa8af8e000bcaccaacf4f9a" exitCode=0
Jan 30 21:30:02 crc kubenswrapper[4721]: I0130 21:30:02.324244 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb" event={"ID":"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca","Type":"ContainerDied","Data":"d0e9ca4a2e13396d3ab7df216f4a804f733562f20fa8af8e000bcaccaacf4f9a"}
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.595755 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.679156 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.703178 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v2jz\" (UniqueName: \"kubernetes.io/projected/2790d917-3de8-4c45-8848-e1df3854c716-kube-api-access-8v2jz\") pod \"2790d917-3de8-4c45-8848-e1df3854c716\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") "
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.703244 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-util\") pod \"2790d917-3de8-4c45-8848-e1df3854c716\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") "
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.703308 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-secret-volume\") pod \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") "
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.703345 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-bundle\") pod \"2790d917-3de8-4c45-8848-e1df3854c716\" (UID: \"2790d917-3de8-4c45-8848-e1df3854c716\") "
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.703365 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvks5\" (UniqueName: \"kubernetes.io/projected/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-kube-api-access-fvks5\") pod \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") "
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.703401 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-config-volume\") pod \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\" (UID: \"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca\") "
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.704785 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-config-volume" (OuterVolumeSpecName: "config-volume") pod "f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" (UID: "f52e1d0c-42d1-4ae6-824a-46ebbfff9fca"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.706251 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-bundle" (OuterVolumeSpecName: "bundle") pod "2790d917-3de8-4c45-8848-e1df3854c716" (UID: "2790d917-3de8-4c45-8848-e1df3854c716"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.712463 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2790d917-3de8-4c45-8848-e1df3854c716-kube-api-access-8v2jz" (OuterVolumeSpecName: "kube-api-access-8v2jz") pod "2790d917-3de8-4c45-8848-e1df3854c716" (UID: "2790d917-3de8-4c45-8848-e1df3854c716"). InnerVolumeSpecName "kube-api-access-8v2jz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.712544 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" (UID: "f52e1d0c-42d1-4ae6-824a-46ebbfff9fca"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.723190 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-util" (OuterVolumeSpecName: "util") pod "2790d917-3de8-4c45-8848-e1df3854c716" (UID: "2790d917-3de8-4c45-8848-e1df3854c716"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.724026 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-kube-api-access-fvks5" (OuterVolumeSpecName: "kube-api-access-fvks5") pod "f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" (UID: "f52e1d0c-42d1-4ae6-824a-46ebbfff9fca"). InnerVolumeSpecName "kube-api-access-fvks5".
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.805772 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.805895 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.805917 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvks5\" (UniqueName: \"kubernetes.io/projected/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-kube-api-access-fvks5\") on node \"crc\" DevicePath \"\"" Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.805937 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.805956 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v2jz\" (UniqueName: \"kubernetes.io/projected/2790d917-3de8-4c45-8848-e1df3854c716-kube-api-access-8v2jz\") on node \"crc\" DevicePath \"\"" Jan 30 21:30:03 crc kubenswrapper[4721]: I0130 21:30:03.805972 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2790d917-3de8-4c45-8848-e1df3854c716-util\") on node \"crc\" DevicePath \"\"" Jan 30 21:30:04 crc kubenswrapper[4721]: I0130 21:30:04.351070 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8" event={"ID":"2790d917-3de8-4c45-8848-e1df3854c716","Type":"ContainerDied","Data":"d834080d7754fdef709a22aa5442145b2edd0c2077cff8a3a3701629b58be7d4"} Jan 30 21:30:04 crc kubenswrapper[4721]: I0130 21:30:04.351644 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d834080d7754fdef709a22aa5442145b2edd0c2077cff8a3a3701629b58be7d4" Jan 30 21:30:04 crc kubenswrapper[4721]: I0130 21:30:04.351127 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8" Jan 30 21:30:04 crc kubenswrapper[4721]: I0130 21:30:04.360977 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb" event={"ID":"f52e1d0c-42d1-4ae6-824a-46ebbfff9fca","Type":"ContainerDied","Data":"098a62d87fadadfc4f686e69cf383ae9e9feebdc95b4caf00d090d84e016fa58"} Jan 30 21:30:04 crc kubenswrapper[4721]: I0130 21:30:04.361035 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098a62d87fadadfc4f686e69cf383ae9e9feebdc95b4caf00d090d84e016fa58" Jan 30 21:30:04 crc kubenswrapper[4721]: I0130 21:30:04.361139 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb" Jan 30 21:30:06 crc kubenswrapper[4721]: I0130 21:30:06.378592 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"1b240294-795d-4733-bd00-b823764bb190","Type":"ContainerStarted","Data":"7b302d53c6253b4f6d2f63f3ce11d5eaef24a7bbb278e428dd59335352e792c4"} Jan 30 21:30:06 crc kubenswrapper[4721]: I0130 21:30:06.382447 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerStarted","Data":"3cd83aeed0d78de99882f81c4014091b974f5a84bb467c61590337bfa23e1b93"} Jan 30 21:30:06 crc kubenswrapper[4721]: I0130 21:30:06.510706 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=5.241093997 podStartE2EDuration="9.510675259s" podCreationTimestamp="2026-01-30 21:29:57 +0000 UTC" firstStartedPulling="2026-01-30 21:30:01.358824793 +0000 UTC m=+790.150726039" lastFinishedPulling="2026-01-30 21:30:05.628406045 +0000 UTC m=+794.420307301" observedRunningTime="2026-01-30 21:30:06.467229777 +0000 UTC m=+795.259131023" watchObservedRunningTime="2026-01-30 21:30:06.510675259 +0000 UTC m=+795.302576505" Jan 30 21:30:07 crc kubenswrapper[4721]: I0130 21:30:07.393116 4721 generic.go:334] "Generic (PLEG): container finished" podID="118e3137-6a70-4388-9f38-647a138813a7" containerID="3cd83aeed0d78de99882f81c4014091b974f5a84bb467c61590337bfa23e1b93" exitCode=0 Jan 30 21:30:07 crc kubenswrapper[4721]: I0130 21:30:07.393239 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerDied","Data":"3cd83aeed0d78de99882f81c4014091b974f5a84bb467c61590337bfa23e1b93"} Jan 30 21:30:08 crc kubenswrapper[4721]: I0130 21:30:08.402929 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerStarted","Data":"8de8900500b6d9283a8c50dce1806f72c5f25b2a12920a34a8ad7ff7cc4e6ce7"} Jan 30 21:30:08 crc kubenswrapper[4721]: I0130 21:30:08.424856 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p7rh7" podStartSLOduration=2.740878618 podStartE2EDuration="8.424830297s" podCreationTimestamp="2026-01-30 21:30:00 +0000 UTC" firstStartedPulling="2026-01-30 21:30:02.313496787 +0000 UTC m=+791.105398033" lastFinishedPulling="2026-01-30 21:30:07.997448426 +0000 UTC m=+796.789349712" observedRunningTime="2026-01-30 21:30:08.419583472 +0000 UTC m=+797.211484728" watchObservedRunningTime="2026-01-30 21:30:08.424830297 +0000 UTC m=+797.216731543" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.339648 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"] Jan 30 21:30:10 crc kubenswrapper[4721]: E0130 21:30:10.340566 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="pull" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.340587 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="pull" Jan 30 21:30:10 crc kubenswrapper[4721]: E0130 21:30:10.340611 4721 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" containerName="collect-profiles" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.340619 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" containerName="collect-profiles" Jan 30 21:30:10 crc kubenswrapper[4721]: E0130 21:30:10.340642 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="util" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.340651 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="util" Jan 30 21:30:10 crc kubenswrapper[4721]: E0130 21:30:10.340663 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="extract" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.340670 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="extract" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.340840 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2790d917-3de8-4c45-8848-e1df3854c716" containerName="extract" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.340855 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" containerName="collect-profiles" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.341743 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.346616 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.346623 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.346988 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.347389 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-sspfb" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.347579 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.348536 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.372253 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"] Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.412631 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjz8l\" (UniqueName: \"kubernetes.io/projected/c86bc11f-3071-4387-9368-da8a53cc69b8-kube-api-access-fjz8l\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c" Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.412946 
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.413051 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.413175 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-apiservice-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.413378 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/c86bc11f-3071-4387-9368-da8a53cc69b8-manager-config\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.514475 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-webhook-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.514538 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.514560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-apiservice-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.514598 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/c86bc11f-3071-4387-9368-da8a53cc69b8-manager-config\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.514639 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjz8l\" (UniqueName: \"kubernetes.io/projected/c86bc11f-3071-4387-9368-da8a53cc69b8-kube-api-access-fjz8l\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.515765 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/c86bc11f-3071-4387-9368-da8a53cc69b8-manager-config\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.521855 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-apiservice-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.522417 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-webhook-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.522978 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c86bc11f-3071-4387-9368-da8a53cc69b8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.533288 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjz8l\" (UniqueName: \"kubernetes.io/projected/c86bc11f-3071-4387-9368-da8a53cc69b8-kube-api-access-fjz8l\") pod \"loki-operator-controller-manager-5d697845c-gth7c\" (UID: \"c86bc11f-3071-4387-9368-da8a53cc69b8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.657529 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.882440 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:10 crc kubenswrapper[4721]: I0130 21:30:10.882794 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:11 crc kubenswrapper[4721]: I0130 21:30:11.035183 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"]
Jan 30 21:30:11 crc kubenswrapper[4721]: W0130 21:30:11.048856 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc86bc11f_3071_4387_9368_da8a53cc69b8.slice/crio-866cc38a436a3f34e5ffd6c00fdc3c1d32e7dfd013ad7d308b2267ac48086df7 WatchSource:0}: Error finding container 866cc38a436a3f34e5ffd6c00fdc3c1d32e7dfd013ad7d308b2267ac48086df7: Status 404 returned error can't find the container with id 866cc38a436a3f34e5ffd6c00fdc3c1d32e7dfd013ad7d308b2267ac48086df7
Jan 30 21:30:11 crc kubenswrapper[4721]: I0130 21:30:11.423123 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c" event={"ID":"c86bc11f-3071-4387-9368-da8a53cc69b8","Type":"ContainerStarted","Data":"866cc38a436a3f34e5ffd6c00fdc3c1d32e7dfd013ad7d308b2267ac48086df7"}
Jan 30 21:30:11 crc kubenswrapper[4721]: I0130 21:30:11.932964 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p7rh7" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="registry-server" probeResult="failure" output=<
Jan 30 21:30:11 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Jan 30 21:30:11 crc kubenswrapper[4721]: >
Jan 30 21:30:19 crc kubenswrapper[4721]: I0130 21:30:19.480587 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c" event={"ID":"c86bc11f-3071-4387-9368-da8a53cc69b8","Type":"ContainerStarted","Data":"4e4417e2b9097353e21e871e6a2332fc96383ae76e2ec6aaa25267b3e23183fb"}
Jan 30 21:30:20 crc kubenswrapper[4721]: I0130 21:30:20.923687 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:20 crc kubenswrapper[4721]: I0130 21:30:20.983554 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:21 crc kubenswrapper[4721]: I0130 21:30:21.860465 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p7rh7"]
Jan 30 21:30:22 crc kubenswrapper[4721]: I0130 21:30:22.501328 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p7rh7" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="registry-server" containerID="cri-o://8de8900500b6d9283a8c50dce1806f72c5f25b2a12920a34a8ad7ff7cc4e6ce7" gracePeriod=2
Jan 30 21:30:23 crc kubenswrapper[4721]: I0130 21:30:23.512944 4721 generic.go:334] "Generic (PLEG): container finished" podID="118e3137-6a70-4388-9f38-647a138813a7" containerID="8de8900500b6d9283a8c50dce1806f72c5f25b2a12920a34a8ad7ff7cc4e6ce7" exitCode=0
Jan 30 21:30:23 crc kubenswrapper[4721]: I0130 21:30:23.513045 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerDied","Data":"8de8900500b6d9283a8c50dce1806f72c5f25b2a12920a34a8ad7ff7cc4e6ce7"}
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.062350 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.204216 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-utilities\") pod \"118e3137-6a70-4388-9f38-647a138813a7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") "
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.204441 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-catalog-content\") pod \"118e3137-6a70-4388-9f38-647a138813a7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") "
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.204527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6294b\" (UniqueName: \"kubernetes.io/projected/118e3137-6a70-4388-9f38-647a138813a7-kube-api-access-6294b\") pod \"118e3137-6a70-4388-9f38-647a138813a7\" (UID: \"118e3137-6a70-4388-9f38-647a138813a7\") "
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.207270 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-utilities" (OuterVolumeSpecName: "utilities") pod "118e3137-6a70-4388-9f38-647a138813a7" (UID: "118e3137-6a70-4388-9f38-647a138813a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.213651 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/118e3137-6a70-4388-9f38-647a138813a7-kube-api-access-6294b" (OuterVolumeSpecName: "kube-api-access-6294b") pod "118e3137-6a70-4388-9f38-647a138813a7" (UID: "118e3137-6a70-4388-9f38-647a138813a7"). InnerVolumeSpecName "kube-api-access-6294b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.306716 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6294b\" (UniqueName: \"kubernetes.io/projected/118e3137-6a70-4388-9f38-647a138813a7-kube-api-access-6294b\") on node \"crc\" DevicePath \"\""
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.306756 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.347863 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "118e3137-6a70-4388-9f38-647a138813a7" (UID: "118e3137-6a70-4388-9f38-647a138813a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.409098 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/118e3137-6a70-4388-9f38-647a138813a7-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.539867 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c" event={"ID":"c86bc11f-3071-4387-9368-da8a53cc69b8","Type":"ContainerStarted","Data":"630aa33f64e77a814369164b3b1213990562612195c16eec26fdbd27a41355d1"}
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.541690 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.546172 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.546746 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p7rh7" event={"ID":"118e3137-6a70-4388-9f38-647a138813a7","Type":"ContainerDied","Data":"7d4e1e1bbcbf06ce3f02e869806f211ce785754a3086d06f77b99cf77ce76e3e"}
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.546857 4721 scope.go:117] "RemoveContainer" containerID="8de8900500b6d9283a8c50dce1806f72c5f25b2a12920a34a8ad7ff7cc4e6ce7"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.546882 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p7rh7"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.579069 4721 scope.go:117] "RemoveContainer" containerID="3cd83aeed0d78de99882f81c4014091b974f5a84bb467c61590337bfa23e1b93"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.586882 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-5d697845c-gth7c" podStartSLOduration=1.7559961849999999 podStartE2EDuration="16.586862483s" podCreationTimestamp="2026-01-30 21:30:10 +0000 UTC" firstStartedPulling="2026-01-30 21:30:11.053283132 +0000 UTC m=+799.845184378" lastFinishedPulling="2026-01-30 21:30:25.88414943 +0000 UTC m=+814.676050676" observedRunningTime="2026-01-30 21:30:26.581809275 +0000 UTC m=+815.373710611" watchObservedRunningTime="2026-01-30 21:30:26.586862483 +0000 UTC m=+815.378763739"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.614822 4721 scope.go:117] "RemoveContainer" containerID="b213ddc81843540ae9b051c64fe853c56dafc3c716c9a47319ad46905f908611"
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.670407 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p7rh7"]
Jan 30 21:30:26 crc kubenswrapper[4721]: I0130 21:30:26.677905 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p7rh7"]
Jan 30 21:30:28 crc kubenswrapper[4721]: I0130 21:30:28.103331 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="118e3137-6a70-4388-9f38-647a138813a7" path="/var/lib/kubelet/pods/118e3137-6a70-4388-9f38-647a138813a7/volumes"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.205714 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sxsrx"]
Jan 30 21:31:05 crc kubenswrapper[4721]: E0130 21:31:05.206891 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="registry-server"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.206909 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="registry-server"
Jan 30 21:31:05 crc kubenswrapper[4721]: E0130 21:31:05.206920 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="extract-utilities"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.206927 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="extract-utilities"
Jan 30 21:31:05 crc kubenswrapper[4721]: E0130 21:31:05.206936 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="extract-content"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.206944 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="extract-content"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.207053 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="118e3137-6a70-4388-9f38-647a138813a7" containerName="registry-server"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.208353 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxsrx"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.218800 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxsrx"]
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.409388 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-catalog-content\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.409994 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6zw6\" (UniqueName: \"kubernetes.io/projected/b7c954a6-33e6-46e7-937c-5b98265c58bb-kube-api-access-j6zw6\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.410199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.512247 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6zw6\" (UniqueName: \"kubernetes.io/projected/b7c954a6-33e6-46e7-937c-5b98265c58bb-kube-api-access-j6zw6\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx"
Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.512349 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx"
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.512407 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-catalog-content\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.512914 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-catalog-content\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.513585 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.536888 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6zw6\" (UniqueName: \"kubernetes.io/projected/b7c954a6-33e6-46e7-937c-5b98265c58bb-kube-api-access-j6zw6\") pod \"redhat-marketplace-sxsrx\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.539377 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:05 crc kubenswrapper[4721]: I0130 21:31:05.984529 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxsrx"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.379353 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.390759 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae7734ptv8"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.403241 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.411791 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08qwdmg"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.421751 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8vkbb"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.422285 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8vkbb" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="registry-server" containerID="cri-o://6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886" gracePeriod=30 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.428731 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h24b6"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.429264 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h24b6" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="registry-server" containerID="cri-o://fb1cea13519959ac21954e85737e98bc028dd045eceafbf562105892552eae6d" gracePeriod=30 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.453812 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szvpk"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.454194 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" podUID="5099f2b0-69e9-481d-8cb7-c70144258515" containerName="marketplace-operator" containerID="cri-o://03c9af54a0e5c552f41d9a6633810d4c5a3402a92ee63ad795111ccffb2cb611" gracePeriod=30 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.466423 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-crqs7"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.466809 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-crqs7" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="registry-server" containerID="cri-o://1100cb35367d56229f82220aedb1d30a28df760abce0daee5d11f4b368891d37" gracePeriod=30 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.475277 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jjn55"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.476697 4721 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.482533 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxsrx"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.501362 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8szh7"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.501828 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8szh7" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="registry-server" containerID="cri-o://22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe" gracePeriod=30 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.511464 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jjn55"] Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.634486 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-954b2\" (UniqueName: \"kubernetes.io/projected/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-kube-api-access-954b2\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.634933 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.634976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.735875 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-954b2\" (UniqueName: \"kubernetes.io/projected/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-kube-api-access-954b2\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.735945 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.735969 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.738058 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.743843 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.754048 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-954b2\" (UniqueName: \"kubernetes.io/projected/8da1faa6-dbea-4a4c-a83d-b6a51551ab85-kube-api-access-954b2\") pod \"marketplace-operator-79b997595-jjn55\" (UID: \"8da1faa6-dbea-4a4c-a83d-b6a51551ab85\") " pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.903558 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.909940 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.919846 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940130 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-catalog-content\") pod \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940229 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-utilities\") pod \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940268 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqhqr\" (UniqueName: \"kubernetes.io/projected/98c82411-35ff-4d85-9ddf-c65a5454ec6b-kube-api-access-sqhqr\") pod \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\" (UID: \"98c82411-35ff-4d85-9ddf-c65a5454ec6b\") " Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940317 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-utilities\") pod \"0401e150-6ab2-4094-8523-472d73c16449\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940379 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68jcj\" (UniqueName: \"kubernetes.io/projected/0401e150-6ab2-4094-8523-472d73c16449-kube-api-access-68jcj\") pod \"0401e150-6ab2-4094-8523-472d73c16449\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940522 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-catalog-content\") pod \"0401e150-6ab2-4094-8523-472d73c16449\" (UID: \"0401e150-6ab2-4094-8523-472d73c16449\") " Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940508 4721 generic.go:334] "Generic (PLEG): container finished" podID="0401e150-6ab2-4094-8523-472d73c16449" containerID="6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886" exitCode=0 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940639 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerDied","Data":"6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940683 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8vkbb" event={"ID":"0401e150-6ab2-4094-8523-472d73c16449","Type":"ContainerDied","Data":"2a9196ce11939834a510e71fcce5a2e50f1cfbcc12875b99cc95128928a7746c"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940710 4721 scope.go:117] "RemoveContainer" containerID="6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.940922 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8vkbb" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.942800 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-utilities" (OuterVolumeSpecName: "utilities") pod "0401e150-6ab2-4094-8523-472d73c16449" (UID: "0401e150-6ab2-4094-8523-472d73c16449"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.944382 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-utilities" (OuterVolumeSpecName: "utilities") pod "98c82411-35ff-4d85-9ddf-c65a5454ec6b" (UID: "98c82411-35ff-4d85-9ddf-c65a5454ec6b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.947331 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0401e150-6ab2-4094-8523-472d73c16449-kube-api-access-68jcj" (OuterVolumeSpecName: "kube-api-access-68jcj") pod "0401e150-6ab2-4094-8523-472d73c16449" (UID: "0401e150-6ab2-4094-8523-472d73c16449"). InnerVolumeSpecName "kube-api-access-68jcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.948556 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98c82411-35ff-4d85-9ddf-c65a5454ec6b-kube-api-access-sqhqr" (OuterVolumeSpecName: "kube-api-access-sqhqr") pod "98c82411-35ff-4d85-9ddf-c65a5454ec6b" (UID: "98c82411-35ff-4d85-9ddf-c65a5454ec6b"). InnerVolumeSpecName "kube-api-access-sqhqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.948900 4721 generic.go:334] "Generic (PLEG): container finished" podID="5099f2b0-69e9-481d-8cb7-c70144258515" containerID="03c9af54a0e5c552f41d9a6633810d4c5a3402a92ee63ad795111ccffb2cb611" exitCode=0 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.948974 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" event={"ID":"5099f2b0-69e9-481d-8cb7-c70144258515","Type":"ContainerDied","Data":"03c9af54a0e5c552f41d9a6633810d4c5a3402a92ee63ad795111ccffb2cb611"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.966166 4721 generic.go:334] "Generic (PLEG): container finished" podID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerID="22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe" exitCode=0 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.966271 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerDied","Data":"22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.966331 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8szh7" event={"ID":"98c82411-35ff-4d85-9ddf-c65a5454ec6b","Type":"ContainerDied","Data":"304d68bbcbbccb9a4cb916fb432ae8609ecd0e312f483ec60e561040f4db8d8c"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.966426 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8szh7" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.973435 4721 generic.go:334] "Generic (PLEG): container finished" podID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerID="fb1cea13519959ac21954e85737e98bc028dd045eceafbf562105892552eae6d" exitCode=0 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.973512 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h24b6" event={"ID":"39cffd6b-fa7b-48fe-b3df-d312891d00d1","Type":"ContainerDied","Data":"fb1cea13519959ac21954e85737e98bc028dd045eceafbf562105892552eae6d"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.973507 4721 scope.go:117] "RemoveContainer" containerID="a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252" Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.976966 4721 generic.go:334] "Generic (PLEG): container finished" podID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerID="fb337665c24ff49adcd99cf75853c2e7e91ed624e176bb71f147a6059005b430" exitCode=0 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.977035 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxsrx" event={"ID":"b7c954a6-33e6-46e7-937c-5b98265c58bb","Type":"ContainerDied","Data":"fb337665c24ff49adcd99cf75853c2e7e91ed624e176bb71f147a6059005b430"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.977080 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxsrx" event={"ID":"b7c954a6-33e6-46e7-937c-5b98265c58bb","Type":"ContainerStarted","Data":"260a2840e42a60be2eebfed610fec6b3bfc6805af96483feb9bf7f313773a7f1"} Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.990626 4721 generic.go:334] "Generic (PLEG): container finished" podID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerID="1100cb35367d56229f82220aedb1d30a28df760abce0daee5d11f4b368891d37" exitCode=0 Jan 30 21:31:06 crc kubenswrapper[4721]: I0130 21:31:06.990723 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerDied","Data":"1100cb35367d56229f82220aedb1d30a28df760abce0daee5d11f4b368891d37"} Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.012330 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0401e150-6ab2-4094-8523-472d73c16449" (UID: "0401e150-6ab2-4094-8523-472d73c16449"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.028369 4721 scope.go:117] "RemoveContainer" containerID="894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.066231 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.066480 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.066503 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqhqr\" (UniqueName: \"kubernetes.io/projected/98c82411-35ff-4d85-9ddf-c65a5454ec6b-kube-api-access-sqhqr\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.066518 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0401e150-6ab2-4094-8523-472d73c16449-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.066996 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68jcj\" (UniqueName: \"kubernetes.io/projected/0401e150-6ab2-4094-8523-472d73c16449-kube-api-access-68jcj\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.087397 4721 scope.go:117] "RemoveContainer" containerID="6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886" Jan 30 21:31:07 crc kubenswrapper[4721]: E0130 21:31:07.088172 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886\": container with ID starting with 6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886 not found: ID does not exist" containerID="6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.088211 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886"} err="failed to get container status \"6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886\": rpc error: code = NotFound desc = could not find container \"6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886\": container with ID starting with 6bce550dffd6e4f597bcef0b933bf31ba70a3f4332775eba0357bfc9b2da9886 not found: ID does not exist" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.088232 4721 scope.go:117] "RemoveContainer" containerID="a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252" Jan 30 21:31:07 crc kubenswrapper[4721]: E0130 21:31:07.088689 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252\": container with ID starting with a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252 not found: ID does not exist" containerID="a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.088739 4721 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252"} err="failed to get container status \"a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252\": rpc error: code = NotFound desc = could not find container \"a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252\": container with ID starting with a69f8e990a1113c96f5f1db7f9804b33e3c6179ad4b6e3b312f136b745e02252 not found: ID does not exist" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.088773 4721 scope.go:117] "RemoveContainer" containerID="894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4" Jan 30 21:31:07 crc kubenswrapper[4721]: E0130 21:31:07.089113 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4\": container with ID starting with 894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4 not found: ID does not exist" containerID="894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.089168 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4"} err="failed to get container status \"894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4\": rpc error: code = NotFound desc = could not find container \"894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4\": container with ID starting with 894914e40ac5f8f1336d4e201a7f9b0848d5b57521d535950969a7edd51f3df4 not found: ID does not exist" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.089214 4721 scope.go:117] "RemoveContainer" containerID="22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.104941 4721 scope.go:117] "RemoveContainer" containerID="3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.107382 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98c82411-35ff-4d85-9ddf-c65a5454ec6b" (UID: "98c82411-35ff-4d85-9ddf-c65a5454ec6b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.131717 4721 scope.go:117] "RemoveContainer" containerID="3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.154068 4721 scope.go:117] "RemoveContainer" containerID="22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe" Jan 30 21:31:07 crc kubenswrapper[4721]: E0130 21:31:07.154554 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe\": container with ID starting with 22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe not found: ID does not exist" containerID="22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.154588 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe"} err="failed to get container status \"22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe\": rpc error: code = NotFound desc = could not find container \"22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe\": container with ID starting with 22069ed94cbd9d5f9f9d114d6f65fc63b6d174204f88b003255c111fcd760efe not found: ID does not exist" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.154614 4721 scope.go:117] "RemoveContainer" containerID="3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992" Jan 30 21:31:07 crc kubenswrapper[4721]: E0130 21:31:07.155243 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992\": container with ID starting with 3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992 not found: ID does not exist" containerID="3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.155263 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992"} err="failed to get container status \"3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992\": rpc error: code = NotFound desc = could not find container \"3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992\": container with ID starting with 3d25317e4eda56b8b164f284c939dd9ac83d849a53f66cc960ad52e5f50e7992 not found: ID does not exist" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.155312 4721 scope.go:117] "RemoveContainer" containerID="3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2" Jan 30 21:31:07 crc kubenswrapper[4721]: E0130 21:31:07.155536 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2\": container with ID starting with 3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2 not found: ID does not exist" containerID="3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.155553 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2"} err="failed to get container status \"3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2\": rpc error: code = NotFound desc = could not find container \"3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2\": container with ID starting with 3350f0c6e9fafba578cf646bad3043af025e4939c48d413524f413706296abc2 not found: ID does not exist" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.171065 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98c82411-35ff-4d85-9ddf-c65a5454ec6b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.282807 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8vkbb"] Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.285559 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8vkbb"] Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.310530 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8szh7"] Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.313145 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8szh7"] Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.386853 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.406870 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jjn55"] Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.408350 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:31:07 crc kubenswrapper[4721]: W0130 21:31:07.409470 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8da1faa6_dbea_4a4c_a83d_b6a51551ab85.slice/crio-71884921a84d0b9a30c52544f66d5d909fa85241f58b8b0bc086b19d8e3703fa WatchSource:0}: Error finding container 71884921a84d0b9a30c52544f66d5d909fa85241f58b8b0bc086b19d8e3703fa: Status 404 returned error can't find the container with id 71884921a84d0b9a30c52544f66d5d909fa85241f58b8b0bc086b19d8e3703fa Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.473015 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576490 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-catalog-content\") pod \"b239aaa0-3dcf-4562-82ef-efe1163a2808\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576549 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dt6z6\" (UniqueName: \"kubernetes.io/projected/b239aaa0-3dcf-4562-82ef-efe1163a2808-kube-api-access-dt6z6\") pod \"b239aaa0-3dcf-4562-82ef-efe1163a2808\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576618 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-catalog-content\") pod \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576652 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9g6s\" (UniqueName: \"kubernetes.io/projected/39cffd6b-fa7b-48fe-b3df-d312891d00d1-kube-api-access-t9g6s\") pod \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576720 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-utilities\") pod \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\" (UID: \"39cffd6b-fa7b-48fe-b3df-d312891d00d1\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576765 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-trusted-ca\") pod \"5099f2b0-69e9-481d-8cb7-c70144258515\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576816 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-utilities\") pod \"b239aaa0-3dcf-4562-82ef-efe1163a2808\" (UID: \"b239aaa0-3dcf-4562-82ef-efe1163a2808\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576884 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v97h8\" (UniqueName: \"kubernetes.io/projected/5099f2b0-69e9-481d-8cb7-c70144258515-kube-api-access-v97h8\") pod \"5099f2b0-69e9-481d-8cb7-c70144258515\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.576944 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-operator-metrics\") pod \"5099f2b0-69e9-481d-8cb7-c70144258515\" (UID: \"5099f2b0-69e9-481d-8cb7-c70144258515\") " Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.577974 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "5099f2b0-69e9-481d-8cb7-c70144258515" (UID: "5099f2b0-69e9-481d-8cb7-c70144258515"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.578254 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-utilities" (OuterVolumeSpecName: "utilities") pod "39cffd6b-fa7b-48fe-b3df-d312891d00d1" (UID: "39cffd6b-fa7b-48fe-b3df-d312891d00d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.579103 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-utilities" (OuterVolumeSpecName: "utilities") pod "b239aaa0-3dcf-4562-82ef-efe1163a2808" (UID: "b239aaa0-3dcf-4562-82ef-efe1163a2808"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.582593 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5099f2b0-69e9-481d-8cb7-c70144258515-kube-api-access-v97h8" (OuterVolumeSpecName: "kube-api-access-v97h8") pod "5099f2b0-69e9-481d-8cb7-c70144258515" (UID: "5099f2b0-69e9-481d-8cb7-c70144258515"). InnerVolumeSpecName "kube-api-access-v97h8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.583635 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39cffd6b-fa7b-48fe-b3df-d312891d00d1-kube-api-access-t9g6s" (OuterVolumeSpecName: "kube-api-access-t9g6s") pod "39cffd6b-fa7b-48fe-b3df-d312891d00d1" (UID: "39cffd6b-fa7b-48fe-b3df-d312891d00d1"). InnerVolumeSpecName "kube-api-access-t9g6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.583916 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "5099f2b0-69e9-481d-8cb7-c70144258515" (UID: "5099f2b0-69e9-481d-8cb7-c70144258515"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.588555 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b239aaa0-3dcf-4562-82ef-efe1163a2808-kube-api-access-dt6z6" (OuterVolumeSpecName: "kube-api-access-dt6z6") pod "b239aaa0-3dcf-4562-82ef-efe1163a2808" (UID: "b239aaa0-3dcf-4562-82ef-efe1163a2808"). InnerVolumeSpecName "kube-api-access-dt6z6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.605470 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b239aaa0-3dcf-4562-82ef-efe1163a2808" (UID: "b239aaa0-3dcf-4562-82ef-efe1163a2808"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.637462 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39cffd6b-fa7b-48fe-b3df-d312891d00d1" (UID: "39cffd6b-fa7b-48fe-b3df-d312891d00d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678342 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678390 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678401 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dt6z6\" (UniqueName: \"kubernetes.io/projected/b239aaa0-3dcf-4562-82ef-efe1163a2808-kube-api-access-dt6z6\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678412 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678422 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9g6s\" (UniqueName: \"kubernetes.io/projected/39cffd6b-fa7b-48fe-b3df-d312891d00d1-kube-api-access-t9g6s\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678432 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39cffd6b-fa7b-48fe-b3df-d312891d00d1-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678443 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5099f2b0-69e9-481d-8cb7-c70144258515-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678453 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b239aaa0-3dcf-4562-82ef-efe1163a2808-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:07 crc kubenswrapper[4721]: I0130 21:31:07.678465 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v97h8\" (UniqueName: \"kubernetes.io/projected/5099f2b0-69e9-481d-8cb7-c70144258515-kube-api-access-v97h8\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.004470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-crqs7" event={"ID":"b239aaa0-3dcf-4562-82ef-efe1163a2808","Type":"ContainerDied","Data":"4a194d87584bccd4c41abf550d5d52decba0527448a6791c3bd6bd9c1f0b1dc5"} Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.004548 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-crqs7" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.004566 4721 scope.go:117] "RemoveContainer" containerID="1100cb35367d56229f82220aedb1d30a28df760abce0daee5d11f4b368891d37" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.018202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h24b6" event={"ID":"39cffd6b-fa7b-48fe-b3df-d312891d00d1","Type":"ContainerDied","Data":"459fd0384c5d77fcfe967715f023633dfd61a606b767a180e07f29a7b64fc670"} Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.018283 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h24b6" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.021912 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" event={"ID":"8da1faa6-dbea-4a4c-a83d-b6a51551ab85","Type":"ContainerStarted","Data":"79c1b39f24ec86f335505a3556c948800a62f086d99876f60e7d0e2858b2f245"} Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.021989 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" event={"ID":"8da1faa6-dbea-4a4c-a83d-b6a51551ab85","Type":"ContainerStarted","Data":"71884921a84d0b9a30c52544f66d5d909fa85241f58b8b0bc086b19d8e3703fa"} Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.023970 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.028071 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.029784 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" event={"ID":"5099f2b0-69e9-481d-8cb7-c70144258515","Type":"ContainerDied","Data":"60decc451c2118dce94df84716c9e0837a2b0f432d19f6475cc5112e6cc3d886"} Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.029788 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-szvpk" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.055334 4721 scope.go:117] "RemoveContainer" containerID="0acb986570e5f591297f9da683794272f26a9392005a1eab30571a212d8dfbcb" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.056192 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-jjn55" podStartSLOduration=2.056165948 podStartE2EDuration="2.056165948s" podCreationTimestamp="2026-01-30 21:31:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:31:08.055365953 +0000 UTC m=+856.847267209" watchObservedRunningTime="2026-01-30 21:31:08.056165948 +0000 UTC m=+856.848067184" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.087347 4721 scope.go:117] "RemoveContainer" containerID="ce0e66579bcad7b9dfb0e40baa037f1af715286594250b46f7f449bc5a25b0b2" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.108139 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0401e150-6ab2-4094-8523-472d73c16449" path="/var/lib/kubelet/pods/0401e150-6ab2-4094-8523-472d73c16449/volumes" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.109560 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2790d917-3de8-4c45-8848-e1df3854c716" path="/var/lib/kubelet/pods/2790d917-3de8-4c45-8848-e1df3854c716/volumes" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.110255 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="820b6616-f2c6-47f9-8ad6-0e196f9cd134" path="/var/lib/kubelet/pods/820b6616-f2c6-47f9-8ad6-0e196f9cd134/volumes" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.112351 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" path="/var/lib/kubelet/pods/98c82411-35ff-4d85-9ddf-c65a5454ec6b/volumes" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.121382 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h24b6"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.127259 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h24b6"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.134021 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-crqs7"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.138004 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-crqs7"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.138550 4721 scope.go:117] "RemoveContainer" containerID="fb1cea13519959ac21954e85737e98bc028dd045eceafbf562105892552eae6d" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.141938 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szvpk"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.151063 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szvpk"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.168704 4721 scope.go:117] "RemoveContainer" containerID="31a09101e180896c63605c07ac5e6993361efa1f7a9837c0abde727a6a764532" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.191647 4721 scope.go:117] "RemoveContainer" 
containerID="6fa3038d0e9d05a4acfadb13a3ece1b757faefaf8a03c23d0fec077f0df7d6c9" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.217673 4721 scope.go:117] "RemoveContainer" containerID="03c9af54a0e5c552f41d9a6633810d4c5a3402a92ee63ad795111ccffb2cb611" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.586877 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zc72v"] Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587253 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587279 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587326 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5099f2b0-69e9-481d-8cb7-c70144258515" containerName="marketplace-operator" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587339 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5099f2b0-69e9-481d-8cb7-c70144258515" containerName="marketplace-operator" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587359 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587372 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587393 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587407 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587430 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587442 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587461 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587474 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587495 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587508 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587522 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587533 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587551 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587563 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587585 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587597 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="extract-content" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587619 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587632 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587651 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587663 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: E0130 21:31:08.587680 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587692 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="extract-utilities" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587899 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5099f2b0-69e9-481d-8cb7-c70144258515" containerName="marketplace-operator" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587926 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587946 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587964 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0401e150-6ab2-4094-8523-472d73c16449" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.587987 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="98c82411-35ff-4d85-9ddf-c65a5454ec6b" containerName="registry-server" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.589857 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.600836 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zc72v"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.694101 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4cx2\" (UniqueName: \"kubernetes.io/projected/abb2e581-41fd-4409-893a-0f8a4b7ebd31-kube-api-access-k4cx2\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.694185 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb2e581-41fd-4409-893a-0f8a4b7ebd31-utilities\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.694263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb2e581-41fd-4409-893a-0f8a4b7ebd31-catalog-content\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.777861 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pn4j5"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.780238 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.783836 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.797733 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4cx2\" (UniqueName: \"kubernetes.io/projected/abb2e581-41fd-4409-893a-0f8a4b7ebd31-kube-api-access-k4cx2\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.797901 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb2e581-41fd-4409-893a-0f8a4b7ebd31-utilities\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.798058 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb2e581-41fd-4409-893a-0f8a4b7ebd31-catalog-content\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.800789 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb2e581-41fd-4409-893a-0f8a4b7ebd31-utilities\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " 
pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.801229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb2e581-41fd-4409-893a-0f8a4b7ebd31-catalog-content\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.820486 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pn4j5"] Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.836999 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4cx2\" (UniqueName: \"kubernetes.io/projected/abb2e581-41fd-4409-893a-0f8a4b7ebd31-kube-api-access-k4cx2\") pod \"redhat-marketplace-zc72v\" (UID: \"abb2e581-41fd-4409-893a-0f8a4b7ebd31\") " pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.902137 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-utilities\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.902711 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fxgn\" (UniqueName: \"kubernetes.io/projected/bd89314e-6d14-49eb-9cf8-448f8fde27e1-kube-api-access-5fxgn\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.903114 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-catalog-content\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:08 crc kubenswrapper[4721]: I0130 21:31:08.986698 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.005308 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-catalog-content\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.005387 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-utilities\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.005460 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fxgn\" (UniqueName: \"kubernetes.io/projected/bd89314e-6d14-49eb-9cf8-448f8fde27e1-kube-api-access-5fxgn\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.006393 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-catalog-content\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.006446 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-utilities\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.043589 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fxgn\" (UniqueName: \"kubernetes.io/projected/bd89314e-6d14-49eb-9cf8-448f8fde27e1-kube-api-access-5fxgn\") pod \"certified-operators-pn4j5\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.059354 4721 generic.go:334] "Generic (PLEG): container finished" podID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerID="e0e579c02c81d111b8ff18c32b82eb13535c161783f226150a6a987c749bd43d" exitCode=0 Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.059499 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxsrx" event={"ID":"b7c954a6-33e6-46e7-937c-5b98265c58bb","Type":"ContainerDied","Data":"e0e579c02c81d111b8ff18c32b82eb13535c161783f226150a6a987c749bd43d"} Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.136940 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.186977 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sfndb"] Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.188880 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.208539 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sfndb"] Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.310453 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bwnx\" (UniqueName: \"kubernetes.io/projected/ee0bd386-6e49-41c9-a0ad-78c96374ed89-kube-api-access-8bwnx\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.310531 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-catalog-content\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.310581 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-utilities\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.382583 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.386940 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zc72v"] Jan 30 21:31:09 crc kubenswrapper[4721]: W0130 21:31:09.392815 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabb2e581_41fd_4409_893a_0f8a4b7ebd31.slice/crio-018941d6f90610e59d68903ff0435e370afbbab4e432ca98fee0c019792830f2 WatchSource:0}: Error finding container 018941d6f90610e59d68903ff0435e370afbbab4e432ca98fee0c019792830f2: Status 404 returned error can't find the container with id 018941d6f90610e59d68903ff0435e370afbbab4e432ca98fee0c019792830f2 Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.412205 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-catalog-content\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.412255 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-utilities\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.412360 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bwnx\" (UniqueName: \"kubernetes.io/projected/ee0bd386-6e49-41c9-a0ad-78c96374ed89-kube-api-access-8bwnx\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " 
pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.413212 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-catalog-content\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.413874 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-utilities\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.442055 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bwnx\" (UniqueName: \"kubernetes.io/projected/ee0bd386-6e49-41c9-a0ad-78c96374ed89-kube-api-access-8bwnx\") pod \"certified-operators-sfndb\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.461924 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pn4j5"] Jan 30 21:31:09 crc kubenswrapper[4721]: W0130 21:31:09.464373 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd89314e_6d14_49eb_9cf8_448f8fde27e1.slice/crio-d4a9b96a919a6a31b5e9dca196a60a3ef4add047530a8bbd37ea8c199ac9f144 WatchSource:0}: Error finding container d4a9b96a919a6a31b5e9dca196a60a3ef4add047530a8bbd37ea8c199ac9f144: Status 404 returned error can't find the container with id d4a9b96a919a6a31b5e9dca196a60a3ef4add047530a8bbd37ea8c199ac9f144 Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.513327 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities\") pod \"b7c954a6-33e6-46e7-937c-5b98265c58bb\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.513785 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6zw6\" (UniqueName: \"kubernetes.io/projected/b7c954a6-33e6-46e7-937c-5b98265c58bb-kube-api-access-j6zw6\") pod \"b7c954a6-33e6-46e7-937c-5b98265c58bb\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.514067 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-catalog-content\") pod \"b7c954a6-33e6-46e7-937c-5b98265c58bb\" (UID: \"b7c954a6-33e6-46e7-937c-5b98265c58bb\") " Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.514645 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities" (OuterVolumeSpecName: "utilities") pod "b7c954a6-33e6-46e7-937c-5b98265c58bb" (UID: "b7c954a6-33e6-46e7-937c-5b98265c58bb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.518706 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7c954a6-33e6-46e7-937c-5b98265c58bb-kube-api-access-j6zw6" (OuterVolumeSpecName: "kube-api-access-j6zw6") pod "b7c954a6-33e6-46e7-937c-5b98265c58bb" (UID: "b7c954a6-33e6-46e7-937c-5b98265c58bb"). InnerVolumeSpecName "kube-api-access-j6zw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.547361 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.556620 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7c954a6-33e6-46e7-937c-5b98265c58bb" (UID: "b7c954a6-33e6-46e7-937c-5b98265c58bb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.616757 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.616794 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c954a6-33e6-46e7-937c-5b98265c58bb-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:09 crc kubenswrapper[4721]: I0130 21:31:09.616807 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6zw6\" (UniqueName: \"kubernetes.io/projected/b7c954a6-33e6-46e7-937c-5b98265c58bb-kube-api-access-j6zw6\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.019584 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sfndb"] Jan 30 21:31:10 crc kubenswrapper[4721]: W0130 21:31:10.030081 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee0bd386_6e49_41c9_a0ad_78c96374ed89.slice/crio-fb4b5c9e77213b23ca26d0150613475a2693c9b8ef73422506faa08322dd9aca WatchSource:0}: Error finding container fb4b5c9e77213b23ca26d0150613475a2693c9b8ef73422506faa08322dd9aca: Status 404 returned error can't find the container with id fb4b5c9e77213b23ca26d0150613475a2693c9b8ef73422506faa08322dd9aca Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.072146 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxsrx" event={"ID":"b7c954a6-33e6-46e7-937c-5b98265c58bb","Type":"ContainerDied","Data":"260a2840e42a60be2eebfed610fec6b3bfc6805af96483feb9bf7f313773a7f1"} Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.072221 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.072250 4721 scope.go:117] "RemoveContainer" containerID="e0e579c02c81d111b8ff18c32b82eb13535c161783f226150a6a987c749bd43d" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.077877 4721 generic.go:334] "Generic (PLEG): container finished" podID="abb2e581-41fd-4409-893a-0f8a4b7ebd31" containerID="0b4f283a5af18ff49ca904fccbd2a1278c8ed6f80ad73640044c49d8148535ac" exitCode=0 Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.077963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zc72v" event={"ID":"abb2e581-41fd-4409-893a-0f8a4b7ebd31","Type":"ContainerDied","Data":"0b4f283a5af18ff49ca904fccbd2a1278c8ed6f80ad73640044c49d8148535ac"} Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.077995 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zc72v" event={"ID":"abb2e581-41fd-4409-893a-0f8a4b7ebd31","Type":"ContainerStarted","Data":"018941d6f90610e59d68903ff0435e370afbbab4e432ca98fee0c019792830f2"} Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.088535 4721 generic.go:334] "Generic (PLEG): container finished" podID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerID="796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847" exitCode=0 Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.088644 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pn4j5" event={"ID":"bd89314e-6d14-49eb-9cf8-448f8fde27e1","Type":"ContainerDied","Data":"796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847"} Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.089134 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pn4j5" event={"ID":"bd89314e-6d14-49eb-9cf8-448f8fde27e1","Type":"ContainerStarted","Data":"d4a9b96a919a6a31b5e9dca196a60a3ef4add047530a8bbd37ea8c199ac9f144"} Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.116424 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39cffd6b-fa7b-48fe-b3df-d312891d00d1" path="/var/lib/kubelet/pods/39cffd6b-fa7b-48fe-b3df-d312891d00d1/volumes" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.117553 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5099f2b0-69e9-481d-8cb7-c70144258515" path="/var/lib/kubelet/pods/5099f2b0-69e9-481d-8cb7-c70144258515/volumes" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.118201 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b239aaa0-3dcf-4562-82ef-efe1163a2808" path="/var/lib/kubelet/pods/b239aaa0-3dcf-4562-82ef-efe1163a2808/volumes" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.119946 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfndb" event={"ID":"ee0bd386-6e49-41c9-a0ad-78c96374ed89","Type":"ContainerStarted","Data":"fb4b5c9e77213b23ca26d0150613475a2693c9b8ef73422506faa08322dd9aca"} Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.142332 4721 scope.go:117] "RemoveContainer" containerID="fb337665c24ff49adcd99cf75853c2e7e91ed624e176bb71f147a6059005b430" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.982967 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rh5tn"] Jan 30 21:31:10 crc kubenswrapper[4721]: E0130 21:31:10.983256 4721 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerName="extract-content" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.983272 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerName="extract-content" Jan 30 21:31:10 crc kubenswrapper[4721]: E0130 21:31:10.983290 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerName="extract-utilities" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.983320 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerName="extract-utilities" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.983541 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" containerName="extract-content" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.984958 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:10 crc kubenswrapper[4721]: I0130 21:31:10.990937 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.007069 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rh5tn"] Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.111987 4721 generic.go:334] "Generic (PLEG): container finished" podID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerID="e8780133b01c3e411872a64b8983a791a7866ac5702caeb6b3d866e1f58491e4" exitCode=0 Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.112052 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfndb" event={"ID":"ee0bd386-6e49-41c9-a0ad-78c96374ed89","Type":"ContainerDied","Data":"e8780133b01c3e411872a64b8983a791a7866ac5702caeb6b3d866e1f58491e4"} Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.150977 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5rnc\" (UniqueName: \"kubernetes.io/projected/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-kube-api-access-k5rnc\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.151257 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-utilities\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.151387 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-catalog-content\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.258213 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-utilities\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.258367 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-catalog-content\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.258464 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5rnc\" (UniqueName: \"kubernetes.io/projected/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-kube-api-access-k5rnc\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.259470 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-catalog-content\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.259549 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-utilities\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.297376 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5rnc\" (UniqueName: \"kubernetes.io/projected/36b5da67-7fd7-4ddb-9f14-c8de51a88e05-kube-api-access-k5rnc\") pod \"community-operators-rh5tn\" (UID: \"36b5da67-7fd7-4ddb-9f14-c8de51a88e05\") " pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.324064 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.394930 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w58vb"] Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.402319 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.407726 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w58vb"] Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.563857 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-catalog-content\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.563924 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-utilities\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.563973 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpw5f\" (UniqueName: \"kubernetes.io/projected/41025bfc-99ed-4cac-a824-b63e3a222754-kube-api-access-rpw5f\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.571496 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xgdzf"] Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.572873 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.575424 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.586143 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xgdzf"] Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.665638 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-catalog-content\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.665708 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-utilities\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.665764 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpw5f\" (UniqueName: \"kubernetes.io/projected/41025bfc-99ed-4cac-a824-b63e3a222754-kube-api-access-rpw5f\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.668512 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-catalog-content\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.669567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-utilities\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.685736 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpw5f\" (UniqueName: \"kubernetes.io/projected/41025bfc-99ed-4cac-a824-b63e3a222754-kube-api-access-rpw5f\") pod \"community-operators-w58vb\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.770337 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbrwm\" (UniqueName: \"kubernetes.io/projected/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-kube-api-access-mbrwm\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.770407 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-utilities\") pod \"redhat-operators-xgdzf\" (UID: 
\"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.770447 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-catalog-content\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.778240 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rh5tn"] Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.793248 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.872100 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbrwm\" (UniqueName: \"kubernetes.io/projected/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-kube-api-access-mbrwm\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.872150 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-utilities\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.872187 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-catalog-content\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.872792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-catalog-content\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.874742 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-utilities\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.896053 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbrwm\" (UniqueName: \"kubernetes.io/projected/7c95c9c5-93b7-4fe9-81fe-3a893a01fb29-kube-api-access-mbrwm\") pod \"redhat-operators-xgdzf\" (UID: \"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29\") " pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:11 crc kubenswrapper[4721]: I0130 21:31:11.945014 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.020827 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w58vb"] Jan 30 21:31:12 crc kubenswrapper[4721]: W0130 21:31:12.027174 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41025bfc_99ed_4cac_a824_b63e3a222754.slice/crio-11cb60e6038094a66d0e4da70fdb4ffb87c1a32f6c4de32468f421ea87358878 WatchSource:0}: Error finding container 11cb60e6038094a66d0e4da70fdb4ffb87c1a32f6c4de32468f421ea87358878: Status 404 returned error can't find the container with id 11cb60e6038094a66d0e4da70fdb4ffb87c1a32f6c4de32468f421ea87358878 Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.174193 4721 generic.go:334] "Generic (PLEG): container finished" podID="abb2e581-41fd-4409-893a-0f8a4b7ebd31" containerID="aa583d71a53974c9dcf095915ed4faf89b8371002ed3e51cb6f5a3e4c9f495ff" exitCode=0 Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.182560 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zc72v" event={"ID":"abb2e581-41fd-4409-893a-0f8a4b7ebd31","Type":"ContainerDied","Data":"aa583d71a53974c9dcf095915ed4faf89b8371002ed3e51cb6f5a3e4c9f495ff"} Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.186123 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh5tn" event={"ID":"36b5da67-7fd7-4ddb-9f14-c8de51a88e05","Type":"ContainerStarted","Data":"318d240b238c974bdec81a549633b490e2b345af20f3203696e301260a662c87"} Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.186154 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh5tn" event={"ID":"36b5da67-7fd7-4ddb-9f14-c8de51a88e05","Type":"ContainerStarted","Data":"d064fb0935720039e31b22da969234be3d8a0785dc657ee5b3f6f9182cc8143e"} Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.188631 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xgdzf"] Jan 30 21:31:12 crc kubenswrapper[4721]: I0130 21:31:12.194599 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w58vb" event={"ID":"41025bfc-99ed-4cac-a824-b63e3a222754","Type":"ContainerStarted","Data":"11cb60e6038094a66d0e4da70fdb4ffb87c1a32f6c4de32468f421ea87358878"} Jan 30 21:31:12 crc kubenswrapper[4721]: W0130 21:31:12.199152 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c95c9c5_93b7_4fe9_81fe_3a893a01fb29.slice/crio-6f33df369fc831b8f4e6eb481efcb32d99c8d40439a77245e19c23705153e383 WatchSource:0}: Error finding container 6f33df369fc831b8f4e6eb481efcb32d99c8d40439a77245e19c23705153e383: Status 404 returned error can't find the container with id 6f33df369fc831b8f4e6eb481efcb32d99c8d40439a77245e19c23705153e383 Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.202279 4721 generic.go:334] "Generic (PLEG): container finished" podID="41025bfc-99ed-4cac-a824-b63e3a222754" containerID="67923f79de347af8127f9932a4e0f00ae99e55cbe6583512d2751d04332d394a" exitCode=0 Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.202391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w58vb" 
event={"ID":"41025bfc-99ed-4cac-a824-b63e3a222754","Type":"ContainerDied","Data":"67923f79de347af8127f9932a4e0f00ae99e55cbe6583512d2751d04332d394a"} Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.206648 4721 generic.go:334] "Generic (PLEG): container finished" podID="7c95c9c5-93b7-4fe9-81fe-3a893a01fb29" containerID="bd678de77e866e72e77b405a8ba5e58f919530d8512fbbabdf4906646178e789" exitCode=0 Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.206720 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xgdzf" event={"ID":"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29","Type":"ContainerDied","Data":"bd678de77e866e72e77b405a8ba5e58f919530d8512fbbabdf4906646178e789"} Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.206749 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xgdzf" event={"ID":"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29","Type":"ContainerStarted","Data":"6f33df369fc831b8f4e6eb481efcb32d99c8d40439a77245e19c23705153e383"} Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.209195 4721 generic.go:334] "Generic (PLEG): container finished" podID="36b5da67-7fd7-4ddb-9f14-c8de51a88e05" containerID="318d240b238c974bdec81a549633b490e2b345af20f3203696e301260a662c87" exitCode=0 Jan 30 21:31:13 crc kubenswrapper[4721]: I0130 21:31:13.209246 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh5tn" event={"ID":"36b5da67-7fd7-4ddb-9f14-c8de51a88e05","Type":"ContainerDied","Data":"318d240b238c974bdec81a549633b490e2b345af20f3203696e301260a662c87"} Jan 30 21:31:17 crc kubenswrapper[4721]: I0130 21:31:17.248357 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zc72v" event={"ID":"abb2e581-41fd-4409-893a-0f8a4b7ebd31","Type":"ContainerStarted","Data":"03041e4be091326924db3c89e515dfa4fc135db38a6fa41a3af99c4b9d564aea"} Jan 30 21:31:17 crc kubenswrapper[4721]: I0130 21:31:17.288813 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zc72v" podStartSLOduration=4.414912141 podStartE2EDuration="9.288776759s" podCreationTimestamp="2026-01-30 21:31:08 +0000 UTC" firstStartedPulling="2026-01-30 21:31:10.079732844 +0000 UTC m=+858.871634100" lastFinishedPulling="2026-01-30 21:31:14.953597432 +0000 UTC m=+863.745498718" observedRunningTime="2026-01-30 21:31:17.281411079 +0000 UTC m=+866.073312385" watchObservedRunningTime="2026-01-30 21:31:17.288776759 +0000 UTC m=+866.080678045" Jan 30 21:31:18 crc kubenswrapper[4721]: I0130 21:31:18.262536 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh5tn" event={"ID":"36b5da67-7fd7-4ddb-9f14-c8de51a88e05","Type":"ContainerStarted","Data":"570ff52766b6e50b7a8ec1e59e37010488f70a1442016000badd7946121ceb72"} Jan 30 21:31:18 crc kubenswrapper[4721]: I0130 21:31:18.987831 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:18 crc kubenswrapper[4721]: I0130 21:31:18.987919 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.271171 4721 generic.go:334] "Generic (PLEG): container finished" podID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerID="251e695d05873dc40e986d27364667a045122bccf509fac782fc5ab951bf0ab3" exitCode=0 Jan 30 21:31:19 crc 
kubenswrapper[4721]: I0130 21:31:19.271269 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfndb" event={"ID":"ee0bd386-6e49-41c9-a0ad-78c96374ed89","Type":"ContainerDied","Data":"251e695d05873dc40e986d27364667a045122bccf509fac782fc5ab951bf0ab3"} Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.275348 4721 generic.go:334] "Generic (PLEG): container finished" podID="41025bfc-99ed-4cac-a824-b63e3a222754" containerID="bb7a369b2e0bec38b385089f9520597bf22e4cbdd1bff6d0c3f013e443d5605c" exitCode=0 Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.275475 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w58vb" event={"ID":"41025bfc-99ed-4cac-a824-b63e3a222754","Type":"ContainerDied","Data":"bb7a369b2e0bec38b385089f9520597bf22e4cbdd1bff6d0c3f013e443d5605c"} Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.281753 4721 generic.go:334] "Generic (PLEG): container finished" podID="7c95c9c5-93b7-4fe9-81fe-3a893a01fb29" containerID="d55f03981bdc9f1e1c80beb0a94d5867e32a1df4f9178ff1cd59bb8670f160d9" exitCode=0 Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.281848 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xgdzf" event={"ID":"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29","Type":"ContainerDied","Data":"d55f03981bdc9f1e1c80beb0a94d5867e32a1df4f9178ff1cd59bb8670f160d9"} Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.287987 4721 generic.go:334] "Generic (PLEG): container finished" podID="36b5da67-7fd7-4ddb-9f14-c8de51a88e05" containerID="570ff52766b6e50b7a8ec1e59e37010488f70a1442016000badd7946121ceb72" exitCode=0 Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.288078 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh5tn" event={"ID":"36b5da67-7fd7-4ddb-9f14-c8de51a88e05","Type":"ContainerDied","Data":"570ff52766b6e50b7a8ec1e59e37010488f70a1442016000badd7946121ceb72"} Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.293379 4721 generic.go:334] "Generic (PLEG): container finished" podID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerID="c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f" exitCode=0 Jan 30 21:31:19 crc kubenswrapper[4721]: I0130 21:31:19.293471 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pn4j5" event={"ID":"bd89314e-6d14-49eb-9cf8-448f8fde27e1","Type":"ContainerDied","Data":"c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f"} Jan 30 21:31:20 crc kubenswrapper[4721]: I0130 21:31:20.055730 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-zc72v" podUID="abb2e581-41fd-4409-893a-0f8a4b7ebd31" containerName="registry-server" probeResult="failure" output=< Jan 30 21:31:20 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:31:20 crc kubenswrapper[4721]: > Jan 30 21:31:20 crc kubenswrapper[4721]: I0130 21:31:20.303821 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh5tn" event={"ID":"36b5da67-7fd7-4ddb-9f14-c8de51a88e05","Type":"ContainerStarted","Data":"cd093b0f6a17a59333ba114312c37f6517620348f4a340535b2847830378bcaf"} Jan 30 21:31:20 crc kubenswrapper[4721]: I0130 21:31:20.306108 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w58vb" 
event={"ID":"41025bfc-99ed-4cac-a824-b63e3a222754","Type":"ContainerStarted","Data":"b395c2969b79314770d958d2242c725c4b3f7dde265e309b735436bd7f30cd6a"} Jan 30 21:31:20 crc kubenswrapper[4721]: I0130 21:31:20.331860 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rh5tn" podStartSLOduration=2.407739008 podStartE2EDuration="10.331839929s" podCreationTimestamp="2026-01-30 21:31:10 +0000 UTC" firstStartedPulling="2026-01-30 21:31:12.188661628 +0000 UTC m=+860.980562874" lastFinishedPulling="2026-01-30 21:31:20.112762559 +0000 UTC m=+868.904663795" observedRunningTime="2026-01-30 21:31:20.328775213 +0000 UTC m=+869.120676469" watchObservedRunningTime="2026-01-30 21:31:20.331839929 +0000 UTC m=+869.123741175" Jan 30 21:31:20 crc kubenswrapper[4721]: I0130 21:31:20.357815 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w58vb" podStartSLOduration=2.6915475840000003 podStartE2EDuration="9.357789409s" podCreationTimestamp="2026-01-30 21:31:11 +0000 UTC" firstStartedPulling="2026-01-30 21:31:13.241656169 +0000 UTC m=+862.033557425" lastFinishedPulling="2026-01-30 21:31:19.907897954 +0000 UTC m=+868.699799250" observedRunningTime="2026-01-30 21:31:20.351626277 +0000 UTC m=+869.143527533" watchObservedRunningTime="2026-01-30 21:31:20.357789409 +0000 UTC m=+869.149690665" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.316119 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xgdzf" event={"ID":"7c95c9c5-93b7-4fe9-81fe-3a893a01fb29","Type":"ContainerStarted","Data":"792e0090ef76fa693275db56fce89cdc089210cff0d4c068c4da881f09544282"} Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.319703 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pn4j5" event={"ID":"bd89314e-6d14-49eb-9cf8-448f8fde27e1","Type":"ContainerStarted","Data":"8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373"} Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.325707 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.325761 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.333826 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfndb" event={"ID":"ee0bd386-6e49-41c9-a0ad-78c96374ed89","Type":"ContainerStarted","Data":"e661feb976d58366ddeab311f93389e13c7c81d06a46a74899046c3dcd137bca"} Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.384013 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xgdzf" podStartSLOduration=3.355652758 podStartE2EDuration="10.383988823s" podCreationTimestamp="2026-01-30 21:31:11 +0000 UTC" firstStartedPulling="2026-01-30 21:31:13.242043281 +0000 UTC m=+862.033944527" lastFinishedPulling="2026-01-30 21:31:20.270379346 +0000 UTC m=+869.062280592" observedRunningTime="2026-01-30 21:31:21.347758881 +0000 UTC m=+870.139660167" watchObservedRunningTime="2026-01-30 21:31:21.383988823 +0000 UTC m=+870.175890069" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.386883 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-sfndb" podStartSLOduration=3.113116926 podStartE2EDuration="12.386875744s" podCreationTimestamp="2026-01-30 21:31:09 +0000 UTC" firstStartedPulling="2026-01-30 21:31:11.116479286 +0000 UTC m=+859.908380552" lastFinishedPulling="2026-01-30 21:31:20.390238124 +0000 UTC m=+869.182139370" observedRunningTime="2026-01-30 21:31:21.382238038 +0000 UTC m=+870.174139284" watchObservedRunningTime="2026-01-30 21:31:21.386875744 +0000 UTC m=+870.178776980" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.402448 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pn4j5" podStartSLOduration=2.985123469 podStartE2EDuration="13.402418099s" podCreationTimestamp="2026-01-30 21:31:08 +0000 UTC" firstStartedPulling="2026-01-30 21:31:10.091409738 +0000 UTC m=+858.883310984" lastFinishedPulling="2026-01-30 21:31:20.508704368 +0000 UTC m=+869.300605614" observedRunningTime="2026-01-30 21:31:21.402086559 +0000 UTC m=+870.193987805" watchObservedRunningTime="2026-01-30 21:31:21.402418099 +0000 UTC m=+870.194319345" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.793610 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.793687 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.946016 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:21 crc kubenswrapper[4721]: I0130 21:31:21.946173 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:22 crc kubenswrapper[4721]: I0130 21:31:22.392340 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-rh5tn" podUID="36b5da67-7fd7-4ddb-9f14-c8de51a88e05" containerName="registry-server" probeResult="failure" output=< Jan 30 21:31:22 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:31:22 crc kubenswrapper[4721]: > Jan 30 21:31:22 crc kubenswrapper[4721]: I0130 21:31:22.836969 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-w58vb" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="registry-server" probeResult="failure" output=< Jan 30 21:31:22 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:31:22 crc kubenswrapper[4721]: > Jan 30 21:31:22 crc kubenswrapper[4721]: I0130 21:31:22.990279 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xgdzf" podUID="7c95c9c5-93b7-4fe9-81fe-3a893a01fb29" containerName="registry-server" probeResult="failure" output=< Jan 30 21:31:22 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:31:22 crc kubenswrapper[4721]: > Jan 30 21:31:29 crc kubenswrapper[4721]: I0130 21:31:29.058449 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:29 crc kubenswrapper[4721]: I0130 21:31:29.128836 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zc72v" Jan 30 21:31:29 crc kubenswrapper[4721]: 
I0130 21:31:29.138179 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:29 crc kubenswrapper[4721]: I0130 21:31:29.138263 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:29 crc kubenswrapper[4721]: I0130 21:31:29.216586 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:29 crc kubenswrapper[4721]: I0130 21:31:29.474478 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:29.548354 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:29.552745 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:29.615836 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:30.483699 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:31.306033 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sfndb"] Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:31.399764 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:31.453466 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rh5tn" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:31.847672 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:31.903556 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:31.992402 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:32.030212 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xgdzf" Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:32.435412 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sfndb" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="registry-server" containerID="cri-o://e661feb976d58366ddeab311f93389e13c7c81d06a46a74899046c3dcd137bca" gracePeriod=2 Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:33.706233 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w58vb"] Jan 30 21:31:34 crc kubenswrapper[4721]: I0130 21:31:33.706716 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w58vb" 
podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="registry-server" containerID="cri-o://b395c2969b79314770d958d2242c725c4b3f7dde265e309b735436bd7f30cd6a" gracePeriod=2 Jan 30 21:31:38 crc kubenswrapper[4721]: I0130 21:31:38.479096 4721 generic.go:334] "Generic (PLEG): container finished" podID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerID="e661feb976d58366ddeab311f93389e13c7c81d06a46a74899046c3dcd137bca" exitCode=0 Jan 30 21:31:38 crc kubenswrapper[4721]: I0130 21:31:38.479175 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfndb" event={"ID":"ee0bd386-6e49-41c9-a0ad-78c96374ed89","Type":"ContainerDied","Data":"e661feb976d58366ddeab311f93389e13c7c81d06a46a74899046c3dcd137bca"} Jan 30 21:31:38 crc kubenswrapper[4721]: I0130 21:31:38.481990 4721 generic.go:334] "Generic (PLEG): container finished" podID="41025bfc-99ed-4cac-a824-b63e3a222754" containerID="b395c2969b79314770d958d2242c725c4b3f7dde265e309b735436bd7f30cd6a" exitCode=0 Jan 30 21:31:38 crc kubenswrapper[4721]: I0130 21:31:38.482088 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w58vb" event={"ID":"41025bfc-99ed-4cac-a824-b63e3a222754","Type":"ContainerDied","Data":"b395c2969b79314770d958d2242c725c4b3f7dde265e309b735436bd7f30cd6a"} Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.193182 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.256009 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bwnx\" (UniqueName: \"kubernetes.io/projected/ee0bd386-6e49-41c9-a0ad-78c96374ed89-kube-api-access-8bwnx\") pod \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.256135 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-catalog-content\") pod \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.256198 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-utilities\") pod \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\" (UID: \"ee0bd386-6e49-41c9-a0ad-78c96374ed89\") " Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.257336 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-utilities" (OuterVolumeSpecName: "utilities") pod "ee0bd386-6e49-41c9-a0ad-78c96374ed89" (UID: "ee0bd386-6e49-41c9-a0ad-78c96374ed89"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.278533 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee0bd386-6e49-41c9-a0ad-78c96374ed89-kube-api-access-8bwnx" (OuterVolumeSpecName: "kube-api-access-8bwnx") pod "ee0bd386-6e49-41c9-a0ad-78c96374ed89" (UID: "ee0bd386-6e49-41c9-a0ad-78c96374ed89"). InnerVolumeSpecName "kube-api-access-8bwnx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.307565 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee0bd386-6e49-41c9-a0ad-78c96374ed89" (UID: "ee0bd386-6e49-41c9-a0ad-78c96374ed89"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.358338 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.358402 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee0bd386-6e49-41c9-a0ad-78c96374ed89-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.358414 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bwnx\" (UniqueName: \"kubernetes.io/projected/ee0bd386-6e49-41c9-a0ad-78c96374ed89-kube-api-access-8bwnx\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.498067 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sfndb" event={"ID":"ee0bd386-6e49-41c9-a0ad-78c96374ed89","Type":"ContainerDied","Data":"fb4b5c9e77213b23ca26d0150613475a2693c9b8ef73422506faa08322dd9aca"} Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.498152 4721 scope.go:117] "RemoveContainer" containerID="e661feb976d58366ddeab311f93389e13c7c81d06a46a74899046c3dcd137bca" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.498369 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sfndb" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.543262 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sfndb"] Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.549701 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sfndb"] Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.563552 4721 scope.go:117] "RemoveContainer" containerID="251e695d05873dc40e986d27364667a045122bccf509fac782fc5ab951bf0ab3" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.592155 4721 scope.go:117] "RemoveContainer" containerID="e8780133b01c3e411872a64b8983a791a7866ac5702caeb6b3d866e1f58491e4" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.636362 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.764208 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-catalog-content\") pod \"41025bfc-99ed-4cac-a824-b63e3a222754\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.764406 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpw5f\" (UniqueName: \"kubernetes.io/projected/41025bfc-99ed-4cac-a824-b63e3a222754-kube-api-access-rpw5f\") pod \"41025bfc-99ed-4cac-a824-b63e3a222754\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.764555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-utilities\") pod \"41025bfc-99ed-4cac-a824-b63e3a222754\" (UID: \"41025bfc-99ed-4cac-a824-b63e3a222754\") " Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.766085 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-utilities" (OuterVolumeSpecName: "utilities") pod "41025bfc-99ed-4cac-a824-b63e3a222754" (UID: "41025bfc-99ed-4cac-a824-b63e3a222754"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.768278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41025bfc-99ed-4cac-a824-b63e3a222754-kube-api-access-rpw5f" (OuterVolumeSpecName: "kube-api-access-rpw5f") pod "41025bfc-99ed-4cac-a824-b63e3a222754" (UID: "41025bfc-99ed-4cac-a824-b63e3a222754"). InnerVolumeSpecName "kube-api-access-rpw5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.823506 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41025bfc-99ed-4cac-a824-b63e3a222754" (UID: "41025bfc-99ed-4cac-a824-b63e3a222754"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.867449 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.867508 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpw5f\" (UniqueName: \"kubernetes.io/projected/41025bfc-99ed-4cac-a824-b63e3a222754-kube-api-access-rpw5f\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:39 crc kubenswrapper[4721]: I0130 21:31:39.867524 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41025bfc-99ed-4cac-a824-b63e3a222754-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.102357 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" path="/var/lib/kubelet/pods/ee0bd386-6e49-41c9-a0ad-78c96374ed89/volumes" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.120510 4721 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","podb7c954a6-33e6-46e7-937c-5b98265c58bb"] err="unable to destroy cgroup paths for cgroup [kubepods burstable podb7c954a6-33e6-46e7-937c-5b98265c58bb] : Timed out while waiting for systemd to remove kubepods-burstable-podb7c954a6_33e6_46e7_937c_5b98265c58bb.slice" Jan 30 21:31:40 crc kubenswrapper[4721]: E0130 21:31:40.120604 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable podb7c954a6-33e6-46e7-937c-5b98265c58bb] : unable to destroy cgroup paths for cgroup [kubepods burstable podb7c954a6-33e6-46e7-937c-5b98265c58bb] : Timed out while waiting for systemd to remove kubepods-burstable-podb7c954a6_33e6_46e7_937c_5b98265c58bb.slice" pod="openshift-marketplace/redhat-marketplace-sxsrx" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.513252 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxsrx" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.513922 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w58vb" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.513946 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w58vb" event={"ID":"41025bfc-99ed-4cac-a824-b63e3a222754","Type":"ContainerDied","Data":"11cb60e6038094a66d0e4da70fdb4ffb87c1a32f6c4de32468f421ea87358878"} Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.514066 4721 scope.go:117] "RemoveContainer" containerID="b395c2969b79314770d958d2242c725c4b3f7dde265e309b735436bd7f30cd6a" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.534568 4721 scope.go:117] "RemoveContainer" containerID="bb7a369b2e0bec38b385089f9520597bf22e4cbdd1bff6d0c3f013e443d5605c" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.568718 4721 scope.go:117] "RemoveContainer" containerID="67923f79de347af8127f9932a4e0f00ae99e55cbe6583512d2751d04332d394a" Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.581761 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxsrx"] Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.588004 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxsrx"] Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.593431 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w58vb"] Jan 30 21:31:40 crc kubenswrapper[4721]: I0130 21:31:40.596050 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w58vb"] Jan 30 21:31:42 crc kubenswrapper[4721]: I0130 21:31:42.109391 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" path="/var/lib/kubelet/pods/41025bfc-99ed-4cac-a824-b63e3a222754/volumes" Jan 30 21:31:42 crc kubenswrapper[4721]: I0130 21:31:42.111688 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7c954a6-33e6-46e7-937c-5b98265c58bb" path="/var/lib/kubelet/pods/b7c954a6-33e6-46e7-937c-5b98265c58bb/volumes" Jan 30 21:31:59 crc kubenswrapper[4721]: I0130 21:31:59.448847 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:31:59 crc kubenswrapper[4721]: I0130 21:31:59.449897 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:32:29 crc kubenswrapper[4721]: I0130 21:32:29.448289 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:32:29 crc kubenswrapper[4721]: I0130 21:32:29.449452 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:32:59 crc kubenswrapper[4721]: I0130 21:32:59.449527 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:32:59 crc kubenswrapper[4721]: I0130 21:32:59.450513 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:32:59 crc kubenswrapper[4721]: I0130 21:32:59.450586 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:32:59 crc kubenswrapper[4721]: I0130 21:32:59.451183 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d6a021bc68a61b4be67915db3a0b2b9c9493a1fa6b81ad7a502063e8657996f"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:32:59 crc kubenswrapper[4721]: I0130 21:32:59.451247 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://7d6a021bc68a61b4be67915db3a0b2b9c9493a1fa6b81ad7a502063e8657996f" gracePeriod=600 Jan 30 21:33:00 crc kubenswrapper[4721]: I0130 21:33:00.191102 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="7d6a021bc68a61b4be67915db3a0b2b9c9493a1fa6b81ad7a502063e8657996f" exitCode=0 Jan 30 21:33:00 crc kubenswrapper[4721]: I0130 21:33:00.191186 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"7d6a021bc68a61b4be67915db3a0b2b9c9493a1fa6b81ad7a502063e8657996f"} Jan 30 21:33:00 crc kubenswrapper[4721]: I0130 21:33:00.192011 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"5a85b81456f4b90fe93c8c62d60cf026368f362c59b118e8c9a626253c6590b2"} Jan 30 21:33:00 crc kubenswrapper[4721]: I0130 21:33:00.192047 4721 scope.go:117] "RemoveContainer" containerID="02acc2ce27e27177088d6c3748fc2d939b3d52222280b1e9d41a45b1ef083f4a" Jan 30 21:34:52 crc kubenswrapper[4721]: I0130 21:34:52.722585 4721 scope.go:117] "RemoveContainer" containerID="b127f7d52c9d7b8b5e592bb285798253b6f5772559a5e472bd9319118dd955ff" Jan 30 21:34:52 crc kubenswrapper[4721]: I0130 21:34:52.751261 4721 scope.go:117] "RemoveContainer" containerID="0e0e838cf0eaed40994aa7c1389c82521ceb8da3027186bdb6688a96813ba776" Jan 30 21:34:52 crc kubenswrapper[4721]: I0130 21:34:52.784141 4721 scope.go:117] "RemoveContainer" containerID="9829b84a2ecf22682a5e30a82780d12fc53018c0b4e3588c51b13b958164e090" Jan 30 21:34:59 crc kubenswrapper[4721]: 
I0130 21:34:59.449256 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:34:59 crc kubenswrapper[4721]: I0130 21:34:59.450854 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:35:29 crc kubenswrapper[4721]: I0130 21:35:29.448929 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:35:29 crc kubenswrapper[4721]: I0130 21:35:29.449772 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.449411 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.450697 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.450808 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.452287 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5a85b81456f4b90fe93c8c62d60cf026368f362c59b118e8c9a626253c6590b2"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.452457 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://5a85b81456f4b90fe93c8c62d60cf026368f362c59b118e8c9a626253c6590b2" gracePeriod=600 Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.942871 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="5a85b81456f4b90fe93c8c62d60cf026368f362c59b118e8c9a626253c6590b2" exitCode=0 Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.942965 
4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"5a85b81456f4b90fe93c8c62d60cf026368f362c59b118e8c9a626253c6590b2"} Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.943467 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"42fade44689770dc48c8ac1fb73d5f9b5b655130e03ffb51c7329e0d230e8309"} Jan 30 21:35:59 crc kubenswrapper[4721]: I0130 21:35:59.943505 4721 scope.go:117] "RemoveContainer" containerID="7d6a021bc68a61b4be67915db3a0b2b9c9493a1fa6b81ad7a502063e8657996f" Jan 30 21:36:52 crc kubenswrapper[4721]: I0130 21:36:52.873806 4721 scope.go:117] "RemoveContainer" containerID="9a89e8a4585742dcd3cc1085b7f3d091d5492b07e06d78baccef6c4c0d020710" Jan 30 21:36:52 crc kubenswrapper[4721]: I0130 21:36:52.924350 4721 scope.go:117] "RemoveContainer" containerID="504f8e46a4f1f8af3783291345c70c28c9ee205ee445afa0c855c992e843d15e" Jan 30 21:36:52 crc kubenswrapper[4721]: I0130 21:36:52.951333 4721 scope.go:117] "RemoveContainer" containerID="f1ed3b731a60817881849868bab52d1ec671f9109bad19d1256749eb9c0fad28" Jan 30 21:37:59 crc kubenswrapper[4721]: I0130 21:37:59.448399 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:37:59 crc kubenswrapper[4721]: I0130 21:37:59.449140 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:38:29 crc kubenswrapper[4721]: I0130 21:38:29.448458 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:38:29 crc kubenswrapper[4721]: I0130 21:38:29.449748 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:38:59 crc kubenswrapper[4721]: I0130 21:38:59.448574 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:38:59 crc kubenswrapper[4721]: I0130 21:38:59.449505 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Jan 30 21:38:59 crc kubenswrapper[4721]: I0130 21:38:59.449564 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:38:59 crc kubenswrapper[4721]: I0130 21:38:59.450449 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"42fade44689770dc48c8ac1fb73d5f9b5b655130e03ffb51c7329e0d230e8309"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:38:59 crc kubenswrapper[4721]: I0130 21:38:59.450506 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://42fade44689770dc48c8ac1fb73d5f9b5b655130e03ffb51c7329e0d230e8309" gracePeriod=600 Jan 30 21:39:00 crc kubenswrapper[4721]: I0130 21:39:00.526328 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="42fade44689770dc48c8ac1fb73d5f9b5b655130e03ffb51c7329e0d230e8309" exitCode=0 Jan 30 21:39:00 crc kubenswrapper[4721]: I0130 21:39:00.526463 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"42fade44689770dc48c8ac1fb73d5f9b5b655130e03ffb51c7329e0d230e8309"} Jan 30 21:39:00 crc kubenswrapper[4721]: I0130 21:39:00.526873 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"} Jan 30 21:39:00 crc kubenswrapper[4721]: I0130 21:39:00.526907 4721 scope.go:117] "RemoveContainer" containerID="5a85b81456f4b90fe93c8c62d60cf026368f362c59b118e8c9a626253c6590b2" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.469692 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm"] Jan 30 21:40:02 crc kubenswrapper[4721]: E0130 21:40:02.470791 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="extract-content" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.470807 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="extract-content" Jan 30 21:40:02 crc kubenswrapper[4721]: E0130 21:40:02.470818 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="extract-utilities" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.470824 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="extract-utilities" Jan 30 21:40:02 crc kubenswrapper[4721]: E0130 21:40:02.470838 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="extract-utilities" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.470845 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" 
containerName="extract-utilities" Jan 30 21:40:02 crc kubenswrapper[4721]: E0130 21:40:02.470856 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="registry-server" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.470863 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="registry-server" Jan 30 21:40:02 crc kubenswrapper[4721]: E0130 21:40:02.470876 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="extract-content" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.470882 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="extract-content" Jan 30 21:40:02 crc kubenswrapper[4721]: E0130 21:40:02.470899 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="registry-server" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.470905 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="registry-server" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.471006 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="41025bfc-99ed-4cac-a824-b63e3a222754" containerName="registry-server" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.471014 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee0bd386-6e49-41c9-a0ad-78c96374ed89" containerName="registry-server" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.471934 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.476609 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.483962 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm"] Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.649747 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.649865 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.649969 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9m4w\" (UniqueName: \"kubernetes.io/projected/02297407-20a9-4d67-8952-9e0b267ab930-kube-api-access-d9m4w\") pod 
\"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.754893 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9m4w\" (UniqueName: \"kubernetes.io/projected/02297407-20a9-4d67-8952-9e0b267ab930-kube-api-access-d9m4w\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.755247 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.755595 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.756521 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.756567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.789553 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9m4w\" (UniqueName: \"kubernetes.io/projected/02297407-20a9-4d67-8952-9e0b267ab930-kube-api-access-d9m4w\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:02 crc kubenswrapper[4721]: I0130 21:40:02.794084 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:03 crc kubenswrapper[4721]: I0130 21:40:03.297336 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm"] Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.098314 4721 generic.go:334] "Generic (PLEG): container finished" podID="02297407-20a9-4d67-8952-9e0b267ab930" containerID="d889d98186986bc45a59d705ef4bf9ebcbec3a6ecfeb0b8ac1eb219391b8c148" exitCode=0 Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.101371 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.105564 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" event={"ID":"02297407-20a9-4d67-8952-9e0b267ab930","Type":"ContainerDied","Data":"d889d98186986bc45a59d705ef4bf9ebcbec3a6ecfeb0b8ac1eb219391b8c148"} Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.105641 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" event={"ID":"02297407-20a9-4d67-8952-9e0b267ab930","Type":"ContainerStarted","Data":"8fce0fe4ccda874ddbd0d0a69375a4b796b1c685c7887059be2d44298f8e394b"} Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.727885 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xkq9d"] Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.730785 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.760870 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xkq9d"] Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.835878 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-catalog-content\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.835944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjxc4\" (UniqueName: \"kubernetes.io/projected/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-kube-api-access-kjxc4\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.836014 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-utilities\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.937306 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-catalog-content\") pod \"redhat-operators-xkq9d\" (UID: 
\"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.937366 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjxc4\" (UniqueName: \"kubernetes.io/projected/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-kube-api-access-kjxc4\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.937422 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-utilities\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.937993 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-catalog-content\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.938017 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-utilities\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:04 crc kubenswrapper[4721]: I0130 21:40:04.961668 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjxc4\" (UniqueName: \"kubernetes.io/projected/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-kube-api-access-kjxc4\") pod \"redhat-operators-xkq9d\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:05 crc kubenswrapper[4721]: I0130 21:40:05.053788 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:05 crc kubenswrapper[4721]: I0130 21:40:05.526342 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xkq9d"] Jan 30 21:40:05 crc kubenswrapper[4721]: W0130 21:40:05.536386 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dbdea20_e1aa_4b15_9522_7c97c4a954c6.slice/crio-5aa2d61d595468850733b24b2aecc5439e16ed8704d078b4342a8ac99eb513cc WatchSource:0}: Error finding container 5aa2d61d595468850733b24b2aecc5439e16ed8704d078b4342a8ac99eb513cc: Status 404 returned error can't find the container with id 5aa2d61d595468850733b24b2aecc5439e16ed8704d078b4342a8ac99eb513cc Jan 30 21:40:06 crc kubenswrapper[4721]: I0130 21:40:06.118833 4721 generic.go:334] "Generic (PLEG): container finished" podID="02297407-20a9-4d67-8952-9e0b267ab930" containerID="e43706cc029d83c2d4afe20fce6859d56a6e47ae2e7a9edee7dc4af9486fea90" exitCode=0 Jan 30 21:40:06 crc kubenswrapper[4721]: I0130 21:40:06.118914 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" event={"ID":"02297407-20a9-4d67-8952-9e0b267ab930","Type":"ContainerDied","Data":"e43706cc029d83c2d4afe20fce6859d56a6e47ae2e7a9edee7dc4af9486fea90"} Jan 30 21:40:06 crc kubenswrapper[4721]: I0130 21:40:06.123002 4721 generic.go:334] "Generic (PLEG): container finished" podID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerID="d97ea491546d0147408ad185b26b6fdbf927dd9e83b8239b85c4ba4a5911df52" exitCode=0 Jan 30 21:40:06 crc kubenswrapper[4721]: I0130 21:40:06.123054 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkq9d" event={"ID":"7dbdea20-e1aa-4b15-9522-7c97c4a954c6","Type":"ContainerDied","Data":"d97ea491546d0147408ad185b26b6fdbf927dd9e83b8239b85c4ba4a5911df52"} Jan 30 21:40:06 crc kubenswrapper[4721]: I0130 21:40:06.123084 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkq9d" event={"ID":"7dbdea20-e1aa-4b15-9522-7c97c4a954c6","Type":"ContainerStarted","Data":"5aa2d61d595468850733b24b2aecc5439e16ed8704d078b4342a8ac99eb513cc"} Jan 30 21:40:07 crc kubenswrapper[4721]: I0130 21:40:07.134646 4721 generic.go:334] "Generic (PLEG): container finished" podID="02297407-20a9-4d67-8952-9e0b267ab930" containerID="cb57c99d261b9d32b958114820a5255b6dafe8b39eb31ec17ebfa9f0d268211f" exitCode=0 Jan 30 21:40:07 crc kubenswrapper[4721]: I0130 21:40:07.134867 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" event={"ID":"02297407-20a9-4d67-8952-9e0b267ab930","Type":"ContainerDied","Data":"cb57c99d261b9d32b958114820a5255b6dafe8b39eb31ec17ebfa9f0d268211f"} Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.144326 4721 generic.go:334] "Generic (PLEG): container finished" podID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerID="bc946c7ff8816069ee191f02223ba20024b007ec894bf2ec8debafeab03ab0aa" exitCode=0 Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.144437 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkq9d" event={"ID":"7dbdea20-e1aa-4b15-9522-7c97c4a954c6","Type":"ContainerDied","Data":"bc946c7ff8816069ee191f02223ba20024b007ec894bf2ec8debafeab03ab0aa"} Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.476354 4721 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.528640 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9m4w\" (UniqueName: \"kubernetes.io/projected/02297407-20a9-4d67-8952-9e0b267ab930-kube-api-access-d9m4w\") pod \"02297407-20a9-4d67-8952-9e0b267ab930\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.528732 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-bundle\") pod \"02297407-20a9-4d67-8952-9e0b267ab930\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.528781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-util\") pod \"02297407-20a9-4d67-8952-9e0b267ab930\" (UID: \"02297407-20a9-4d67-8952-9e0b267ab930\") " Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.529764 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-bundle" (OuterVolumeSpecName: "bundle") pod "02297407-20a9-4d67-8952-9e0b267ab930" (UID: "02297407-20a9-4d67-8952-9e0b267ab930"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.542591 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02297407-20a9-4d67-8952-9e0b267ab930-kube-api-access-d9m4w" (OuterVolumeSpecName: "kube-api-access-d9m4w") pod "02297407-20a9-4d67-8952-9e0b267ab930" (UID: "02297407-20a9-4d67-8952-9e0b267ab930"). InnerVolumeSpecName "kube-api-access-d9m4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.564733 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-util" (OuterVolumeSpecName: "util") pod "02297407-20a9-4d67-8952-9e0b267ab930" (UID: "02297407-20a9-4d67-8952-9e0b267ab930"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.631169 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.631215 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02297407-20a9-4d67-8952-9e0b267ab930-util\") on node \"crc\" DevicePath \"\"" Jan 30 21:40:08 crc kubenswrapper[4721]: I0130 21:40:08.631229 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9m4w\" (UniqueName: \"kubernetes.io/projected/02297407-20a9-4d67-8952-9e0b267ab930-kube-api-access-d9m4w\") on node \"crc\" DevicePath \"\"" Jan 30 21:40:09 crc kubenswrapper[4721]: I0130 21:40:09.161919 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" event={"ID":"02297407-20a9-4d67-8952-9e0b267ab930","Type":"ContainerDied","Data":"8fce0fe4ccda874ddbd0d0a69375a4b796b1c685c7887059be2d44298f8e394b"} Jan 30 21:40:09 crc kubenswrapper[4721]: I0130 21:40:09.162513 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fce0fe4ccda874ddbd0d0a69375a4b796b1c685c7887059be2d44298f8e394b" Jan 30 21:40:09 crc kubenswrapper[4721]: I0130 21:40:09.162010 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm" Jan 30 21:40:09 crc kubenswrapper[4721]: I0130 21:40:09.168065 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkq9d" event={"ID":"7dbdea20-e1aa-4b15-9522-7c97c4a954c6","Type":"ContainerStarted","Data":"affff7d116ca65ecc3ac642653c5fddaca34adfb291a96ffa625f79a4ea34c2c"} Jan 30 21:40:09 crc kubenswrapper[4721]: I0130 21:40:09.188390 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xkq9d" podStartSLOduration=2.747517577 podStartE2EDuration="5.188367895s" podCreationTimestamp="2026-01-30 21:40:04 +0000 UTC" firstStartedPulling="2026-01-30 21:40:06.124707456 +0000 UTC m=+1394.916608742" lastFinishedPulling="2026-01-30 21:40:08.565557814 +0000 UTC m=+1397.357459060" observedRunningTime="2026-01-30 21:40:09.184425393 +0000 UTC m=+1397.976326639" watchObservedRunningTime="2026-01-30 21:40:09.188367895 +0000 UTC m=+1397.980269131" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.060654 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-swldn"] Jan 30 21:40:13 crc kubenswrapper[4721]: E0130 21:40:13.061681 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="util" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.061715 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="util" Jan 30 21:40:13 crc kubenswrapper[4721]: E0130 21:40:13.061759 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="extract" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.061779 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="extract" Jan 30 21:40:13 crc 
kubenswrapper[4721]: E0130 21:40:13.061814 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="pull" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.061833 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="pull" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.062059 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="02297407-20a9-4d67-8952-9e0b267ab930" containerName="extract" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.062964 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-swldn" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.065848 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.066042 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-lrm6r" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.071109 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.114908 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-swldn"] Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.208545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr57t\" (UniqueName: \"kubernetes.io/projected/9ff9ae3f-3898-4963-996f-5ee35048f5af-kube-api-access-xr57t\") pod \"nmstate-operator-646758c888-swldn\" (UID: \"9ff9ae3f-3898-4963-996f-5ee35048f5af\") " pod="openshift-nmstate/nmstate-operator-646758c888-swldn" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.310675 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr57t\" (UniqueName: \"kubernetes.io/projected/9ff9ae3f-3898-4963-996f-5ee35048f5af-kube-api-access-xr57t\") pod \"nmstate-operator-646758c888-swldn\" (UID: \"9ff9ae3f-3898-4963-996f-5ee35048f5af\") " pod="openshift-nmstate/nmstate-operator-646758c888-swldn" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.329967 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr57t\" (UniqueName: \"kubernetes.io/projected/9ff9ae3f-3898-4963-996f-5ee35048f5af-kube-api-access-xr57t\") pod \"nmstate-operator-646758c888-swldn\" (UID: \"9ff9ae3f-3898-4963-996f-5ee35048f5af\") " pod="openshift-nmstate/nmstate-operator-646758c888-swldn" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.429082 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-swldn" Jan 30 21:40:13 crc kubenswrapper[4721]: I0130 21:40:13.772026 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-swldn"] Jan 30 21:40:14 crc kubenswrapper[4721]: I0130 21:40:14.204122 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-swldn" event={"ID":"9ff9ae3f-3898-4963-996f-5ee35048f5af","Type":"ContainerStarted","Data":"206fecd3ef4375fb4bd652b38a18d84af70999db96e7376081b4b72f50b7d7bd"} Jan 30 21:40:15 crc kubenswrapper[4721]: I0130 21:40:15.055349 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:15 crc kubenswrapper[4721]: I0130 21:40:15.055462 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:16 crc kubenswrapper[4721]: I0130 21:40:16.114330 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xkq9d" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="registry-server" probeResult="failure" output=< Jan 30 21:40:16 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:40:16 crc kubenswrapper[4721]: > Jan 30 21:40:17 crc kubenswrapper[4721]: I0130 21:40:17.226980 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-swldn" event={"ID":"9ff9ae3f-3898-4963-996f-5ee35048f5af","Type":"ContainerStarted","Data":"fa31a0b1fe5424f781bc9b3668d120793f9ead2df5c0667b22f069d846d21d16"} Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.882075 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-swldn" podStartSLOduration=8.477425216 podStartE2EDuration="10.882048084s" podCreationTimestamp="2026-01-30 21:40:13 +0000 UTC" firstStartedPulling="2026-01-30 21:40:13.782415959 +0000 UTC m=+1402.574317205" lastFinishedPulling="2026-01-30 21:40:16.187038837 +0000 UTC m=+1404.978940073" observedRunningTime="2026-01-30 21:40:17.251988446 +0000 UTC m=+1406.043889692" watchObservedRunningTime="2026-01-30 21:40:23.882048084 +0000 UTC m=+1412.673949330" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.884775 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-cv4hc"] Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.886357 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.889118 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz"] Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.890134 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.890838 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-dtn8t" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.894585 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.906560 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-cv4hc"] Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.911493 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz"] Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.914826 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvt22\" (UniqueName: \"kubernetes.io/projected/b623908d-b2f5-49d0-9810-a2638fee1d6a-kube-api-access-mvt22\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.914911 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b623908d-b2f5-49d0-9810-a2638fee1d6a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.950216 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-mqjg9"] Jan 30 21:40:23 crc kubenswrapper[4721]: I0130 21:40:23.951292 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.016865 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf5jg\" (UniqueName: \"kubernetes.io/projected/fc4f4701-25d8-4af1-9128-d625c2448550-kube-api-access-mf5jg\") pod \"nmstate-metrics-54757c584b-cv4hc\" (UID: \"fc4f4701-25d8-4af1-9128-d625c2448550\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.016983 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvt22\" (UniqueName: \"kubernetes.io/projected/b623908d-b2f5-49d0-9810-a2638fee1d6a-kube-api-access-mvt22\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.017570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b623908d-b2f5-49d0-9810-a2638fee1d6a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:24 crc kubenswrapper[4721]: E0130 21:40:24.017750 4721 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 30 21:40:24 crc kubenswrapper[4721]: E0130 21:40:24.017829 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b623908d-b2f5-49d0-9810-a2638fee1d6a-tls-key-pair podName:b623908d-b2f5-49d0-9810-a2638fee1d6a nodeName:}" failed. No retries permitted until 2026-01-30 21:40:24.51780474 +0000 UTC m=+1413.309705986 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/b623908d-b2f5-49d0-9810-a2638fee1d6a-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-t8rgz" (UID: "b623908d-b2f5-49d0-9810-a2638fee1d6a") : secret "openshift-nmstate-webhook" not found Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.042216 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvt22\" (UniqueName: \"kubernetes.io/projected/b623908d-b2f5-49d0-9810-a2638fee1d6a-kube-api-access-mvt22\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.076738 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn"] Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.077696 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.081904 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.082403 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-x4b7j" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.082861 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123512 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-nmstate-lock\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123572 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4lrr\" (UniqueName: \"kubernetes.io/projected/acdc2ab0-dd4b-4cb0-a325-ae7569073244-kube-api-access-j4lrr\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123625 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwj4l\" (UniqueName: \"kubernetes.io/projected/13af9eb8-866f-4f4f-9698-e1208720edea-kube-api-access-kwj4l\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123668 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-dbus-socket\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123688 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/13af9eb8-866f-4f4f-9698-e1208720edea-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123714 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-ovs-socket\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.123735 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/13af9eb8-866f-4f4f-9698-e1208720edea-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc 
kubenswrapper[4721]: I0130 21:40:24.123755 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf5jg\" (UniqueName: \"kubernetes.io/projected/fc4f4701-25d8-4af1-9128-d625c2448550-kube-api-access-mf5jg\") pod \"nmstate-metrics-54757c584b-cv4hc\" (UID: \"fc4f4701-25d8-4af1-9128-d625c2448550\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.143522 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn"] Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.163450 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf5jg\" (UniqueName: \"kubernetes.io/projected/fc4f4701-25d8-4af1-9128-d625c2448550-kube-api-access-mf5jg\") pod \"nmstate-metrics-54757c584b-cv4hc\" (UID: \"fc4f4701-25d8-4af1-9128-d625c2448550\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.212768 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.224940 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-ovs-socket\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225018 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/13af9eb8-866f-4f4f-9698-e1208720edea-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225088 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-nmstate-lock\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225119 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4lrr\" (UniqueName: \"kubernetes.io/projected/acdc2ab0-dd4b-4cb0-a325-ae7569073244-kube-api-access-j4lrr\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225167 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwj4l\" (UniqueName: \"kubernetes.io/projected/13af9eb8-866f-4f4f-9698-e1208720edea-kube-api-access-kwj4l\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225224 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-dbus-socket\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " 
pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225226 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-nmstate-lock\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225272 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/13af9eb8-866f-4f4f-9698-e1208720edea-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.225085 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-ovs-socket\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.226050 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/acdc2ab0-dd4b-4cb0-a325-ae7569073244-dbus-socket\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.226340 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/13af9eb8-866f-4f4f-9698-e1208720edea-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.236388 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/13af9eb8-866f-4f4f-9698-e1208720edea-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.244720 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4lrr\" (UniqueName: \"kubernetes.io/projected/acdc2ab0-dd4b-4cb0-a325-ae7569073244-kube-api-access-j4lrr\") pod \"nmstate-handler-mqjg9\" (UID: \"acdc2ab0-dd4b-4cb0-a325-ae7569073244\") " pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.251116 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwj4l\" (UniqueName: \"kubernetes.io/projected/13af9eb8-866f-4f4f-9698-e1208720edea-kube-api-access-kwj4l\") pod \"nmstate-console-plugin-7754f76f8b-4n7dn\" (UID: \"13af9eb8-866f-4f4f-9698-e1208720edea\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.306821 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.339248 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-57654f46f6-q2wn9"] Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.341687 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.351678 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-57654f46f6-q2wn9"] Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.405195 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532491 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-console-config\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532574 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b623908d-b2f5-49d0-9810-a2638fee1d6a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532610 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/09ebb839-66d3-4463-ac49-117b28aeb265-console-oauth-config\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532643 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-trusted-ca-bundle\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532666 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-service-ca\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532761 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-oauth-serving-cert\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzfd2\" (UniqueName: \"kubernetes.io/projected/09ebb839-66d3-4463-ac49-117b28aeb265-kube-api-access-mzfd2\") pod 
\"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.532960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/09ebb839-66d3-4463-ac49-117b28aeb265-console-serving-cert\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.540157 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b623908d-b2f5-49d0-9810-a2638fee1d6a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t8rgz\" (UID: \"b623908d-b2f5-49d0-9810-a2638fee1d6a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634536 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzfd2\" (UniqueName: \"kubernetes.io/projected/09ebb839-66d3-4463-ac49-117b28aeb265-kube-api-access-mzfd2\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634624 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/09ebb839-66d3-4463-ac49-117b28aeb265-console-serving-cert\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-console-config\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634697 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/09ebb839-66d3-4463-ac49-117b28aeb265-console-oauth-config\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634726 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-trusted-ca-bundle\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634749 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-service-ca\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.634773 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-oauth-serving-cert\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.636433 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-oauth-serving-cert\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.636546 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-console-config\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.636894 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-service-ca\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.637552 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ebb839-66d3-4463-ac49-117b28aeb265-trusted-ca-bundle\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.639125 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/09ebb839-66d3-4463-ac49-117b28aeb265-console-serving-cert\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.639670 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/09ebb839-66d3-4463-ac49-117b28aeb265-console-oauth-config\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.655706 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzfd2\" (UniqueName: \"kubernetes.io/projected/09ebb839-66d3-4463-ac49-117b28aeb265-kube-api-access-mzfd2\") pod \"console-57654f46f6-q2wn9\" (UID: \"09ebb839-66d3-4463-ac49-117b28aeb265\") " pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.668598 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn"] Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.683341 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-cv4hc"] Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.693855 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:24 crc kubenswrapper[4721]: W0130 21:40:24.703657 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc4f4701_25d8_4af1_9128_d625c2448550.slice/crio-4e6e9a660bc68147bb412cd92c385fc43dcb07b2bcdf44e43924020cc52ea1c2 WatchSource:0}: Error finding container 4e6e9a660bc68147bb412cd92c385fc43dcb07b2bcdf44e43924020cc52ea1c2: Status 404 returned error can't find the container with id 4e6e9a660bc68147bb412cd92c385fc43dcb07b2bcdf44e43924020cc52ea1c2 Jan 30 21:40:24 crc kubenswrapper[4721]: I0130 21:40:24.825414 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.018320 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-57654f46f6-q2wn9"] Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.069155 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz"] Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.124706 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.182163 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.292838 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-57654f46f6-q2wn9" event={"ID":"09ebb839-66d3-4463-ac49-117b28aeb265","Type":"ContainerStarted","Data":"65714d9ecc263e61cf3f7b6fbb997a843c33337f8943e71b0e4d96b80a658280"} Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.292903 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-57654f46f6-q2wn9" event={"ID":"09ebb839-66d3-4463-ac49-117b28aeb265","Type":"ContainerStarted","Data":"8fd46d17ca4946e9497858a0f8b8bc12cfcc0e97074c321f90185f65ba187816"} Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.293976 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" event={"ID":"b623908d-b2f5-49d0-9810-a2638fee1d6a","Type":"ContainerStarted","Data":"3078fc7df9d152fd630bfa6bd70fee53950adcd7c9cd2253c6f3f7b090a5974d"} Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.296737 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" event={"ID":"13af9eb8-866f-4f4f-9698-e1208720edea","Type":"ContainerStarted","Data":"c54fa63f85c53532c694c06fc402560a589cab6431e9f5a34369724986218e48"} Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.298246 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" event={"ID":"fc4f4701-25d8-4af1-9128-d625c2448550","Type":"ContainerStarted","Data":"4e6e9a660bc68147bb412cd92c385fc43dcb07b2bcdf44e43924020cc52ea1c2"} Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.300730 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-mqjg9" event={"ID":"acdc2ab0-dd4b-4cb0-a325-ae7569073244","Type":"ContainerStarted","Data":"cde9268842a80e48457815a0350da52e6b356057e5ed222ee25bb4fbb46d0307"} Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.322191 4721 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-57654f46f6-q2wn9" podStartSLOduration=1.322159188 podStartE2EDuration="1.322159188s" podCreationTimestamp="2026-01-30 21:40:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:40:25.31767868 +0000 UTC m=+1414.109579976" watchObservedRunningTime="2026-01-30 21:40:25.322159188 +0000 UTC m=+1414.114060444" Jan 30 21:40:25 crc kubenswrapper[4721]: I0130 21:40:25.360994 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xkq9d"] Jan 30 21:40:26 crc kubenswrapper[4721]: I0130 21:40:26.312110 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xkq9d" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="registry-server" containerID="cri-o://affff7d116ca65ecc3ac642653c5fddaca34adfb291a96ffa625f79a4ea34c2c" gracePeriod=2 Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.323051 4721 generic.go:334] "Generic (PLEG): container finished" podID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerID="affff7d116ca65ecc3ac642653c5fddaca34adfb291a96ffa625f79a4ea34c2c" exitCode=0 Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.323125 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkq9d" event={"ID":"7dbdea20-e1aa-4b15-9522-7c97c4a954c6","Type":"ContainerDied","Data":"affff7d116ca65ecc3ac642653c5fddaca34adfb291a96ffa625f79a4ea34c2c"} Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.487151 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.687649 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-utilities\") pod \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.687798 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjxc4\" (UniqueName: \"kubernetes.io/projected/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-kube-api-access-kjxc4\") pod \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.687853 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-catalog-content\") pod \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\" (UID: \"7dbdea20-e1aa-4b15-9522-7c97c4a954c6\") " Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.689207 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-utilities" (OuterVolumeSpecName: "utilities") pod "7dbdea20-e1aa-4b15-9522-7c97c4a954c6" (UID: "7dbdea20-e1aa-4b15-9522-7c97c4a954c6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.713938 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-kube-api-access-kjxc4" (OuterVolumeSpecName: "kube-api-access-kjxc4") pod "7dbdea20-e1aa-4b15-9522-7c97c4a954c6" (UID: "7dbdea20-e1aa-4b15-9522-7c97c4a954c6"). InnerVolumeSpecName "kube-api-access-kjxc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.790104 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjxc4\" (UniqueName: \"kubernetes.io/projected/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-kube-api-access-kjxc4\") on node \"crc\" DevicePath \"\"" Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.790178 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.827949 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dbdea20-e1aa-4b15-9522-7c97c4a954c6" (UID: "7dbdea20-e1aa-4b15-9522-7c97c4a954c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:40:27 crc kubenswrapper[4721]: I0130 21:40:27.891633 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dbdea20-e1aa-4b15-9522-7c97c4a954c6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.345510 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-mqjg9" event={"ID":"acdc2ab0-dd4b-4cb0-a325-ae7569073244","Type":"ContainerStarted","Data":"4054614a701796cdb7331e7666db2fc827ed313b52f65016a6548e93c13ea9e4"} Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.345666 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.351032 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xkq9d" event={"ID":"7dbdea20-e1aa-4b15-9522-7c97c4a954c6","Type":"ContainerDied","Data":"5aa2d61d595468850733b24b2aecc5439e16ed8704d078b4342a8ac99eb513cc"} Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.351099 4721 scope.go:117] "RemoveContainer" containerID="affff7d116ca65ecc3ac642653c5fddaca34adfb291a96ffa625f79a4ea34c2c" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.351043 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xkq9d" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.353891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" event={"ID":"b623908d-b2f5-49d0-9810-a2638fee1d6a","Type":"ContainerStarted","Data":"83ab0f0553a236121a89dd21122fba630a582468b6df52504e3a71e1288b4fbc"} Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.354124 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.358666 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" event={"ID":"13af9eb8-866f-4f4f-9698-e1208720edea","Type":"ContainerStarted","Data":"d5b8940d06ec86643c3232563bc994f22ac24eb35a412740783171402f58c376"} Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.360347 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" event={"ID":"fc4f4701-25d8-4af1-9128-d625c2448550","Type":"ContainerStarted","Data":"289144e1ab4efd8978ae7441a767fb0819efd4cfc740d0899a9904483829c0d7"} Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.368894 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-mqjg9" podStartSLOduration=2.254527769 podStartE2EDuration="5.368871074s" podCreationTimestamp="2026-01-30 21:40:23 +0000 UTC" firstStartedPulling="2026-01-30 21:40:24.374159806 +0000 UTC m=+1413.166061052" lastFinishedPulling="2026-01-30 21:40:27.488503071 +0000 UTC m=+1416.280404357" observedRunningTime="2026-01-30 21:40:28.364536189 +0000 UTC m=+1417.156437465" watchObservedRunningTime="2026-01-30 21:40:28.368871074 +0000 UTC m=+1417.160772320" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.387573 4721 scope.go:117] "RemoveContainer" containerID="bc946c7ff8816069ee191f02223ba20024b007ec894bf2ec8debafeab03ab0aa" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.394541 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xkq9d"] Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.402796 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xkq9d"] Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.410833 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-4n7dn" podStartSLOduration=1.643847013 podStartE2EDuration="4.409793759s" podCreationTimestamp="2026-01-30 21:40:24 +0000 UTC" firstStartedPulling="2026-01-30 21:40:24.660843657 +0000 UTC m=+1413.452744893" lastFinishedPulling="2026-01-30 21:40:27.426790373 +0000 UTC m=+1416.218691639" observedRunningTime="2026-01-30 21:40:28.407131506 +0000 UTC m=+1417.199032782" watchObservedRunningTime="2026-01-30 21:40:28.409793759 +0000 UTC m=+1417.201695015" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.418832 4721 scope.go:117] "RemoveContainer" containerID="d97ea491546d0147408ad185b26b6fdbf927dd9e83b8239b85c4ba4a5911df52" Jan 30 21:40:28 crc kubenswrapper[4721]: I0130 21:40:28.434410 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" podStartSLOduration=2.957284381 podStartE2EDuration="5.434386249s" podCreationTimestamp="2026-01-30 21:40:23 +0000 UTC" 
firstStartedPulling="2026-01-30 21:40:25.09703583 +0000 UTC m=+1413.888937076" lastFinishedPulling="2026-01-30 21:40:27.574137688 +0000 UTC m=+1416.366038944" observedRunningTime="2026-01-30 21:40:28.433823972 +0000 UTC m=+1417.225725228" watchObservedRunningTime="2026-01-30 21:40:28.434386249 +0000 UTC m=+1417.226287505" Jan 30 21:40:30 crc kubenswrapper[4721]: I0130 21:40:30.108136 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" path="/var/lib/kubelet/pods/7dbdea20-e1aa-4b15-9522-7c97c4a954c6/volumes" Jan 30 21:40:30 crc kubenswrapper[4721]: I0130 21:40:30.382767 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" event={"ID":"fc4f4701-25d8-4af1-9128-d625c2448550","Type":"ContainerStarted","Data":"2d1b71fe4740524805382d19c7ddfb6c58da5b701ab88398d604c6b269fd90b7"} Jan 30 21:40:30 crc kubenswrapper[4721]: I0130 21:40:30.411887 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-cv4hc" podStartSLOduration=2.010010161 podStartE2EDuration="7.411874544s" podCreationTimestamp="2026-01-30 21:40:23 +0000 UTC" firstStartedPulling="2026-01-30 21:40:24.71269067 +0000 UTC m=+1413.504591916" lastFinishedPulling="2026-01-30 21:40:30.114555053 +0000 UTC m=+1418.906456299" observedRunningTime="2026-01-30 21:40:30.408801609 +0000 UTC m=+1419.200702875" watchObservedRunningTime="2026-01-30 21:40:30.411874544 +0000 UTC m=+1419.203775790" Jan 30 21:40:34 crc kubenswrapper[4721]: I0130 21:40:34.349539 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-mqjg9" Jan 30 21:40:34 crc kubenswrapper[4721]: I0130 21:40:34.694294 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:34 crc kubenswrapper[4721]: I0130 21:40:34.694434 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:34 crc kubenswrapper[4721]: I0130 21:40:34.700526 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:35 crc kubenswrapper[4721]: I0130 21:40:35.438017 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-57654f46f6-q2wn9" Jan 30 21:40:35 crc kubenswrapper[4721]: I0130 21:40:35.544594 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xvtnr"] Jan 30 21:40:44 crc kubenswrapper[4721]: I0130 21:40:44.835383 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t8rgz" Jan 30 21:40:59 crc kubenswrapper[4721]: I0130 21:40:59.448340 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:40:59 crc kubenswrapper[4721]: I0130 21:40:59.449258 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Jan 30 21:41:00 crc kubenswrapper[4721]: I0130 21:41:00.602595 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-xvtnr" podUID="715d346b-ba37-4920-a27b-5f9ef61133ef" containerName="console" containerID="cri-o://69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73" gracePeriod=15 Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.094173 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xvtnr_715d346b-ba37-4920-a27b-5f9ef61133ef/console/0.log" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.094343 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.263960 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-console-config\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.264033 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-serving-cert\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.264092 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-oauth-serving-cert\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.264140 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j42n\" (UniqueName: \"kubernetes.io/projected/715d346b-ba37-4920-a27b-5f9ef61133ef-kube-api-access-2j42n\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.264268 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-oauth-config\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.264433 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-trusted-ca-bundle\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.264487 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-service-ca\") pod \"715d346b-ba37-4920-a27b-5f9ef61133ef\" (UID: \"715d346b-ba37-4920-a27b-5f9ef61133ef\") " Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.265239 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-oauth-serving-cert" (OuterVolumeSpecName: 
"oauth-serving-cert") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.265892 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-service-ca" (OuterVolumeSpecName: "service-ca") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.266002 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-console-config" (OuterVolumeSpecName: "console-config") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.266091 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.267001 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.267038 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.267058 4721 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-console-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.267076 4721 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/715d346b-ba37-4920-a27b-5f9ef61133ef-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.275798 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/715d346b-ba37-4920-a27b-5f9ef61133ef-kube-api-access-2j42n" (OuterVolumeSpecName: "kube-api-access-2j42n") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "kube-api-access-2j42n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.275939 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.276758 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "715d346b-ba37-4920-a27b-5f9ef61133ef" (UID: "715d346b-ba37-4920-a27b-5f9ef61133ef"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.368710 4721 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.369384 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j42n\" (UniqueName: \"kubernetes.io/projected/715d346b-ba37-4920-a27b-5f9ef61133ef-kube-api-access-2j42n\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.369430 4721 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/715d346b-ba37-4920-a27b-5f9ef61133ef-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.727463 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xvtnr_715d346b-ba37-4920-a27b-5f9ef61133ef/console/0.log" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.727536 4721 generic.go:334] "Generic (PLEG): container finished" podID="715d346b-ba37-4920-a27b-5f9ef61133ef" containerID="69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73" exitCode=2 Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.727589 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xvtnr" event={"ID":"715d346b-ba37-4920-a27b-5f9ef61133ef","Type":"ContainerDied","Data":"69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73"} Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.727634 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xvtnr" event={"ID":"715d346b-ba37-4920-a27b-5f9ef61133ef","Type":"ContainerDied","Data":"252a233dcc9ac00ba17477057837a8939321e0a52fe841c029ccdf13b1a69a22"} Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.727649 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xvtnr" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.727663 4721 scope.go:117] "RemoveContainer" containerID="69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.757451 4721 scope.go:117] "RemoveContainer" containerID="69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73" Jan 30 21:41:01 crc kubenswrapper[4721]: E0130 21:41:01.758175 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73\": container with ID starting with 69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73 not found: ID does not exist" containerID="69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.758239 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73"} err="failed to get container status \"69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73\": rpc error: code = NotFound desc = could not find container \"69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73\": container with ID starting with 69b28429ed92fd98f280c465c238366468f0a89ec5b3c283d35e51c1fce27b73 not found: ID does not exist" Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.774728 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xvtnr"] Jan 30 21:41:01 crc kubenswrapper[4721]: I0130 21:41:01.781119 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-xvtnr"] Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.121256 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="715d346b-ba37-4920-a27b-5f9ef61133ef" path="/var/lib/kubelet/pods/715d346b-ba37-4920-a27b-5f9ef61133ef/volumes" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122053 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm"] Jan 30 21:41:02 crc kubenswrapper[4721]: E0130 21:41:02.122561 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="extract-utilities" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122584 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="extract-utilities" Jan 30 21:41:02 crc kubenswrapper[4721]: E0130 21:41:02.122598 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="715d346b-ba37-4920-a27b-5f9ef61133ef" containerName="console" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122603 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="715d346b-ba37-4920-a27b-5f9ef61133ef" containerName="console" Jan 30 21:41:02 crc kubenswrapper[4721]: E0130 21:41:02.122618 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="registry-server" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122627 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="registry-server" Jan 30 21:41:02 crc kubenswrapper[4721]: E0130 21:41:02.122649 4721 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="extract-content" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122655 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="extract-content" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122840 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="715d346b-ba37-4920-a27b-5f9ef61133ef" containerName="console" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.122853 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dbdea20-e1aa-4b15-9522-7c97c4a954c6" containerName="registry-server" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.125642 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.131892 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.136306 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm"] Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.283379 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.283468 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czsvr\" (UniqueName: \"kubernetes.io/projected/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-kube-api-access-czsvr\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.283567 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.385426 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.385585 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: 
\"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.385651 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czsvr\" (UniqueName: \"kubernetes.io/projected/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-kube-api-access-czsvr\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.386601 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.387162 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.405802 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czsvr\" (UniqueName: \"kubernetes.io/projected/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-kube-api-access-czsvr\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.453151 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:02 crc kubenswrapper[4721]: I0130 21:41:02.758649 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm"] Jan 30 21:41:03 crc kubenswrapper[4721]: I0130 21:41:03.750393 4721 generic.go:334] "Generic (PLEG): container finished" podID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerID="f1cfcb472bebbdc4062922481ff66286480295f828acf72b1e71ebc42ae1431a" exitCode=0 Jan 30 21:41:03 crc kubenswrapper[4721]: I0130 21:41:03.750462 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" event={"ID":"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6","Type":"ContainerDied","Data":"f1cfcb472bebbdc4062922481ff66286480295f828acf72b1e71ebc42ae1431a"} Jan 30 21:41:03 crc kubenswrapper[4721]: I0130 21:41:03.750544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" event={"ID":"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6","Type":"ContainerStarted","Data":"c8fe596f8589276726be25fb443bb95f18dfaf1643123918a07bd9d928d0683f"} Jan 30 21:41:05 crc kubenswrapper[4721]: I0130 21:41:05.770589 4721 generic.go:334] "Generic (PLEG): container finished" podID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerID="80164ee3a61096f1d1ff8f77b8c8e3f6037b2378d1a776696e0ae77a34a27072" exitCode=0 Jan 30 21:41:05 crc kubenswrapper[4721]: I0130 21:41:05.770659 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" event={"ID":"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6","Type":"ContainerDied","Data":"80164ee3a61096f1d1ff8f77b8c8e3f6037b2378d1a776696e0ae77a34a27072"} Jan 30 21:41:06 crc kubenswrapper[4721]: I0130 21:41:06.783777 4721 generic.go:334] "Generic (PLEG): container finished" podID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerID="76e3e96f3a6dfb9a8eb8ebce826b879d7eb67500c32fab4e7e276f02391d8feb" exitCode=0 Jan 30 21:41:06 crc kubenswrapper[4721]: I0130 21:41:06.783842 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" event={"ID":"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6","Type":"ContainerDied","Data":"76e3e96f3a6dfb9a8eb8ebce826b879d7eb67500c32fab4e7e276f02391d8feb"} Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.156239 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.291016 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-bundle\") pod \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.292487 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czsvr\" (UniqueName: \"kubernetes.io/projected/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-kube-api-access-czsvr\") pod \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.292555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-util\") pod \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\" (UID: \"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6\") " Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.292924 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-bundle" (OuterVolumeSpecName: "bundle") pod "b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" (UID: "b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.305436 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-kube-api-access-czsvr" (OuterVolumeSpecName: "kube-api-access-czsvr") pod "b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" (UID: "b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6"). InnerVolumeSpecName "kube-api-access-czsvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.307436 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-util" (OuterVolumeSpecName: "util") pod "b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" (UID: "b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.395933 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.395978 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czsvr\" (UniqueName: \"kubernetes.io/projected/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-kube-api-access-czsvr\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.395993 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6-util\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.807621 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" event={"ID":"b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6","Type":"ContainerDied","Data":"c8fe596f8589276726be25fb443bb95f18dfaf1643123918a07bd9d928d0683f"} Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.807687 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8fe596f8589276726be25fb443bb95f18dfaf1643123918a07bd9d928d0683f" Jan 30 21:41:08 crc kubenswrapper[4721]: I0130 21:41:08.807843 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.070684 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw"] Jan 30 21:41:17 crc kubenswrapper[4721]: E0130 21:41:17.071614 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="util" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.071629 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="util" Jan 30 21:41:17 crc kubenswrapper[4721]: E0130 21:41:17.071641 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="extract" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.071648 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="extract" Jan 30 21:41:17 crc kubenswrapper[4721]: E0130 21:41:17.071666 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="pull" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.071673 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="pull" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.071783 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6" containerName="extract" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.072327 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.074602 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.074749 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.074904 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.074969 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.075533 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-8k4mv" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.089372 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw"] Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.241003 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-webhook-cert\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.241104 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n485c\" (UniqueName: \"kubernetes.io/projected/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-kube-api-access-n485c\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.241133 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-apiservice-cert\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.340008 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf"] Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.341580 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.344271 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-apiservice-cert\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.344332 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-webhook-cert\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.344378 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdgtz\" (UniqueName: \"kubernetes.io/projected/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-kube-api-access-bdgtz\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.344399 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-webhook-cert\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.344442 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n485c\" (UniqueName: \"kubernetes.io/projected/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-kube-api-access-n485c\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.344468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-apiservice-cert\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.348241 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.348587 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.348932 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-7mzzd" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.351615 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-webhook-cert\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.353583 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-apiservice-cert\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.372019 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf"] Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.374447 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n485c\" (UniqueName: \"kubernetes.io/projected/58594e33-cffd-4e67-99a7-7f3fb6b0d6f0-kube-api-access-n485c\") pod \"metallb-operator-controller-manager-5785c9bddd-96xvw\" (UID: \"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0\") " pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.393810 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.445468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-apiservice-cert\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.445543 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdgtz\" (UniqueName: \"kubernetes.io/projected/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-kube-api-access-bdgtz\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.445564 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-webhook-cert\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.450430 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-apiservice-cert\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.451828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-webhook-cert\") pod 
\"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.471153 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdgtz\" (UniqueName: \"kubernetes.io/projected/dae4a1c8-c40a-4506-b9c8-b2146ef8c480-kube-api-access-bdgtz\") pod \"metallb-operator-webhook-server-5bf68458df-dm6mf\" (UID: \"dae4a1c8-c40a-4506-b9c8-b2146ef8c480\") " pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.476548 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.665540 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw"] Jan 30 21:41:17 crc kubenswrapper[4721]: I0130 21:41:17.870825 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" event={"ID":"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0","Type":"ContainerStarted","Data":"feeaf2bf2c8092e5a9e9d8bbd2be05d9a8692ea42aa166c4b660836fdfc0ff8b"} Jan 30 21:41:18 crc kubenswrapper[4721]: I0130 21:41:18.025910 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf"] Jan 30 21:41:18 crc kubenswrapper[4721]: W0130 21:41:18.035885 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddae4a1c8_c40a_4506_b9c8_b2146ef8c480.slice/crio-87c0a6f9e5e567b5f6133f0094443c777dd54dcbeeff689c7634a54d4de8eb20 WatchSource:0}: Error finding container 87c0a6f9e5e567b5f6133f0094443c777dd54dcbeeff689c7634a54d4de8eb20: Status 404 returned error can't find the container with id 87c0a6f9e5e567b5f6133f0094443c777dd54dcbeeff689c7634a54d4de8eb20 Jan 30 21:41:18 crc kubenswrapper[4721]: I0130 21:41:18.884890 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" event={"ID":"dae4a1c8-c40a-4506-b9c8-b2146ef8c480","Type":"ContainerStarted","Data":"87c0a6f9e5e567b5f6133f0094443c777dd54dcbeeff689c7634a54d4de8eb20"} Jan 30 21:41:21 crc kubenswrapper[4721]: I0130 21:41:21.912599 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" event={"ID":"58594e33-cffd-4e67-99a7-7f3fb6b0d6f0","Type":"ContainerStarted","Data":"814fca05dbe66dcda239024707149e54ebdda85f47532a37b8bd72a2871cfdea"} Jan 30 21:41:21 crc kubenswrapper[4721]: I0130 21:41:21.913379 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:22 crc kubenswrapper[4721]: I0130 21:41:22.119103 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" podStartSLOduration=1.558628613 podStartE2EDuration="5.119080885s" podCreationTimestamp="2026-01-30 21:41:17 +0000 UTC" firstStartedPulling="2026-01-30 21:41:17.694982072 +0000 UTC m=+1466.486883318" lastFinishedPulling="2026-01-30 21:41:21.255434344 +0000 UTC m=+1470.047335590" observedRunningTime="2026-01-30 21:41:21.933141093 +0000 UTC m=+1470.725042339" 
watchObservedRunningTime="2026-01-30 21:41:22.119080885 +0000 UTC m=+1470.910982131" Jan 30 21:41:23 crc kubenswrapper[4721]: I0130 21:41:23.937704 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" event={"ID":"dae4a1c8-c40a-4506-b9c8-b2146ef8c480","Type":"ContainerStarted","Data":"be39061b166559875ece4f307ca924f28259f92a528d461388ba4689d0a35fe8"} Jan 30 21:41:23 crc kubenswrapper[4721]: I0130 21:41:23.938979 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:23 crc kubenswrapper[4721]: I0130 21:41:23.979350 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" podStartSLOduration=1.480030511 podStartE2EDuration="6.979327433s" podCreationTimestamp="2026-01-30 21:41:17 +0000 UTC" firstStartedPulling="2026-01-30 21:41:18.040923075 +0000 UTC m=+1466.832824321" lastFinishedPulling="2026-01-30 21:41:23.540219997 +0000 UTC m=+1472.332121243" observedRunningTime="2026-01-30 21:41:23.976111404 +0000 UTC m=+1472.768012680" watchObservedRunningTime="2026-01-30 21:41:23.979327433 +0000 UTC m=+1472.771228679" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.668394 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rpc79"] Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.670732 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.688138 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpc79"] Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.853419 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-catalog-content\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.853481 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7cfw\" (UniqueName: \"kubernetes.io/projected/9abcc621-02a4-435b-92d2-0897a0a8039b-kube-api-access-b7cfw\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.853722 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-utilities\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.954927 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-utilities\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.955451 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-catalog-content\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.955482 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7cfw\" (UniqueName: \"kubernetes.io/projected/9abcc621-02a4-435b-92d2-0897a0a8039b-kube-api-access-b7cfw\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.955556 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-utilities\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.956077 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-catalog-content\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:28 crc kubenswrapper[4721]: I0130 21:41:28.991530 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7cfw\" (UniqueName: \"kubernetes.io/projected/9abcc621-02a4-435b-92d2-0897a0a8039b-kube-api-access-b7cfw\") pod \"redhat-marketplace-rpc79\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:29 crc kubenswrapper[4721]: I0130 21:41:29.288949 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:29 crc kubenswrapper[4721]: I0130 21:41:29.454961 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:41:29 crc kubenswrapper[4721]: I0130 21:41:29.455551 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:41:29 crc kubenswrapper[4721]: I0130 21:41:29.769061 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpc79"] Jan 30 21:41:29 crc kubenswrapper[4721]: I0130 21:41:29.986462 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerStarted","Data":"8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810"} Jan 30 21:41:29 crc kubenswrapper[4721]: I0130 21:41:29.986724 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerStarted","Data":"9d46acb26539309f6ac0e4e1fc1c5254ee01f9debbd86d5ae05834b288d4ae0b"} Jan 30 21:41:30 crc kubenswrapper[4721]: I0130 21:41:30.995716 4721 generic.go:334] "Generic (PLEG): container finished" podID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerID="8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810" exitCode=0 Jan 30 21:41:30 crc kubenswrapper[4721]: I0130 21:41:30.995789 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerDied","Data":"8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810"} Jan 30 21:41:33 crc kubenswrapper[4721]: I0130 21:41:33.015570 4721 generic.go:334] "Generic (PLEG): container finished" podID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerID="a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca" exitCode=0 Jan 30 21:41:33 crc kubenswrapper[4721]: I0130 21:41:33.015680 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerDied","Data":"a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca"} Jan 30 21:41:34 crc kubenswrapper[4721]: I0130 21:41:34.029019 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerStarted","Data":"d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278"} Jan 30 21:41:34 crc kubenswrapper[4721]: I0130 21:41:34.057528 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rpc79" podStartSLOduration=3.660681039 podStartE2EDuration="6.057498788s" podCreationTimestamp="2026-01-30 21:41:28 +0000 UTC" firstStartedPulling="2026-01-30 21:41:30.998033727 +0000 UTC m=+1479.789935013" lastFinishedPulling="2026-01-30 
21:41:33.394851516 +0000 UTC m=+1482.186752762" observedRunningTime="2026-01-30 21:41:34.053190505 +0000 UTC m=+1482.845091761" watchObservedRunningTime="2026-01-30 21:41:34.057498788 +0000 UTC m=+1482.849400044" Jan 30 21:41:37 crc kubenswrapper[4721]: I0130 21:41:37.481926 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5bf68458df-dm6mf" Jan 30 21:41:39 crc kubenswrapper[4721]: I0130 21:41:39.289181 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:39 crc kubenswrapper[4721]: I0130 21:41:39.289844 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:39 crc kubenswrapper[4721]: I0130 21:41:39.389313 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:40 crc kubenswrapper[4721]: I0130 21:41:40.149014 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:41 crc kubenswrapper[4721]: I0130 21:41:41.620915 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpc79"] Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.105846 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rpc79" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="registry-server" containerID="cri-o://d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278" gracePeriod=2 Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.535674 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.705105 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-utilities\") pod \"9abcc621-02a4-435b-92d2-0897a0a8039b\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.705186 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-catalog-content\") pod \"9abcc621-02a4-435b-92d2-0897a0a8039b\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.705226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7cfw\" (UniqueName: \"kubernetes.io/projected/9abcc621-02a4-435b-92d2-0897a0a8039b-kube-api-access-b7cfw\") pod \"9abcc621-02a4-435b-92d2-0897a0a8039b\" (UID: \"9abcc621-02a4-435b-92d2-0897a0a8039b\") " Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.706003 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-utilities" (OuterVolumeSpecName: "utilities") pod "9abcc621-02a4-435b-92d2-0897a0a8039b" (UID: "9abcc621-02a4-435b-92d2-0897a0a8039b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.715531 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9abcc621-02a4-435b-92d2-0897a0a8039b-kube-api-access-b7cfw" (OuterVolumeSpecName: "kube-api-access-b7cfw") pod "9abcc621-02a4-435b-92d2-0897a0a8039b" (UID: "9abcc621-02a4-435b-92d2-0897a0a8039b"). InnerVolumeSpecName "kube-api-access-b7cfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.745738 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9abcc621-02a4-435b-92d2-0897a0a8039b" (UID: "9abcc621-02a4-435b-92d2-0897a0a8039b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.807741 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7cfw\" (UniqueName: \"kubernetes.io/projected/9abcc621-02a4-435b-92d2-0897a0a8039b-kube-api-access-b7cfw\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.807826 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:42 crc kubenswrapper[4721]: I0130 21:41:42.807848 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abcc621-02a4-435b-92d2-0897a0a8039b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.117432 4721 generic.go:334] "Generic (PLEG): container finished" podID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerID="d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278" exitCode=0 Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.117498 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerDied","Data":"d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278"} Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.117595 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rpc79" event={"ID":"9abcc621-02a4-435b-92d2-0897a0a8039b","Type":"ContainerDied","Data":"9d46acb26539309f6ac0e4e1fc1c5254ee01f9debbd86d5ae05834b288d4ae0b"} Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.117608 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rpc79" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.117624 4721 scope.go:117] "RemoveContainer" containerID="d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.142587 4721 scope.go:117] "RemoveContainer" containerID="a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.160278 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpc79"] Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.172992 4721 scope.go:117] "RemoveContainer" containerID="8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.181441 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rpc79"] Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.197063 4721 scope.go:117] "RemoveContainer" containerID="d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278" Jan 30 21:41:43 crc kubenswrapper[4721]: E0130 21:41:43.197920 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278\": container with ID starting with d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278 not found: ID does not exist" containerID="d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.197974 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278"} err="failed to get container status \"d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278\": rpc error: code = NotFound desc = could not find container \"d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278\": container with ID starting with d3c37192e3707d4050ec1490d8fc1fe1e1bd12cc1e22b6b81de187c31a4ad278 not found: ID does not exist" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.198012 4721 scope.go:117] "RemoveContainer" containerID="a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca" Jan 30 21:41:43 crc kubenswrapper[4721]: E0130 21:41:43.198476 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca\": container with ID starting with a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca not found: ID does not exist" containerID="a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.198504 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca"} err="failed to get container status \"a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca\": rpc error: code = NotFound desc = could not find container \"a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca\": container with ID starting with a603d04ac8291aeb682402fba1f12a23a7ef7be1267cd067e2601e855dc9b3ca not found: ID does not exist" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.198525 4721 scope.go:117] "RemoveContainer" 
containerID="8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810" Jan 30 21:41:43 crc kubenswrapper[4721]: E0130 21:41:43.198907 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810\": container with ID starting with 8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810 not found: ID does not exist" containerID="8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810" Jan 30 21:41:43 crc kubenswrapper[4721]: I0130 21:41:43.198938 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810"} err="failed to get container status \"8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810\": rpc error: code = NotFound desc = could not find container \"8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810\": container with ID starting with 8470272e34849ef4bfae0ec1cdd09deebb1eb4664956d11b217231029de27810 not found: ID does not exist" Jan 30 21:41:44 crc kubenswrapper[4721]: I0130 21:41:44.112755 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" path="/var/lib/kubelet/pods/9abcc621-02a4-435b-92d2-0897a0a8039b/volumes" Jan 30 21:41:57 crc kubenswrapper[4721]: I0130 21:41:57.398409 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5785c9bddd-96xvw" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.216997 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-thrjz"] Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.217869 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="extract-content" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.217887 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="extract-content" Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.217905 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="extract-utilities" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.217916 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="extract-utilities" Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.217932 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="registry-server" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.217941 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="registry-server" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.218117 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9abcc621-02a4-435b-92d2-0897a0a8039b" containerName="registry-server" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.220960 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.230277 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-rkdmg" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.230903 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.231584 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.232176 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr"] Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.233347 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.239416 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.250899 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr"] Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.324631 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-flglj"] Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.325852 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.327681 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4mkg8" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.328277 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.328676 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.328997 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.356124 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-4xz99"] Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.358181 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.365875 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.378038 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-4xz99"] Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.414702 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdtst\" (UniqueName: \"kubernetes.io/projected/3aee18f8-337b-4dfc-9951-c44ea52f5193-kube-api-access-fdtst\") pod \"frr-k8s-webhook-server-7df86c4f6c-t5qjr\" (UID: \"3aee18f8-337b-4dfc-9951-c44ea52f5193\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.414953 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-reloader\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.415074 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-startup\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.415097 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-conf\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.415119 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-sockets\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.415168 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics-certs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.415223 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.415267 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trsbs\" (UniqueName: \"kubernetes.io/projected/501b07ab-d449-4910-a0f2-e37dcef83f0b-kube-api-access-trsbs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 
21:41:58.415511 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aee18f8-337b-4dfc-9951-c44ea52f5193-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-t5qjr\" (UID: \"3aee18f8-337b-4dfc-9951-c44ea52f5193\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517237 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aee18f8-337b-4dfc-9951-c44ea52f5193-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-t5qjr\" (UID: \"3aee18f8-337b-4dfc-9951-c44ea52f5193\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517349 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdtst\" (UniqueName: \"kubernetes.io/projected/3aee18f8-337b-4dfc-9951-c44ea52f5193-kube-api-access-fdtst\") pod \"frr-k8s-webhook-server-7df86c4f6c-t5qjr\" (UID: \"3aee18f8-337b-4dfc-9951-c44ea52f5193\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517385 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-reloader\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517416 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kg5s\" (UniqueName: \"kubernetes.io/projected/852b81d6-0da5-4035-841b-2613bd3f2561-kube-api-access-6kg5s\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517449 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517472 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-startup\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517492 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-conf\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.517516 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-sockets\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518037 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics-certs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518093 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5447d1b-a776-4ddb-a90a-e926273205f3-metallb-excludel2\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518137 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxsfp\" (UniqueName: \"kubernetes.io/projected/d5447d1b-a776-4ddb-a90a-e926273205f3-kube-api-access-bxsfp\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518172 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518200 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-metrics-certs\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518227 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/852b81d6-0da5-4035-841b-2613bd3f2561-metrics-certs\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518255 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/852b81d6-0da5-4035-841b-2613bd3f2561-cert\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518250 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-sockets\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518283 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trsbs\" (UniqueName: \"kubernetes.io/projected/501b07ab-d449-4910-a0f2-e37dcef83f0b-kube-api-access-trsbs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518314 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-reloader\") pod \"frr-k8s-thrjz\" (UID: 
\"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.518435 4721 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518442 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-conf\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.518504 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics-certs podName:501b07ab-d449-4910-a0f2-e37dcef83f0b nodeName:}" failed. No retries permitted until 2026-01-30 21:41:59.018477296 +0000 UTC m=+1507.810378742 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics-certs") pod "frr-k8s-thrjz" (UID: "501b07ab-d449-4910-a0f2-e37dcef83f0b") : secret "frr-k8s-certs-secret" not found Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.518558 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.519030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/501b07ab-d449-4910-a0f2-e37dcef83f0b-frr-startup\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.530244 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aee18f8-337b-4dfc-9951-c44ea52f5193-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-t5qjr\" (UID: \"3aee18f8-337b-4dfc-9951-c44ea52f5193\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.539789 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trsbs\" (UniqueName: \"kubernetes.io/projected/501b07ab-d449-4910-a0f2-e37dcef83f0b-kube-api-access-trsbs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.542237 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdtst\" (UniqueName: \"kubernetes.io/projected/3aee18f8-337b-4dfc-9951-c44ea52f5193-kube-api-access-fdtst\") pod \"frr-k8s-webhook-server-7df86c4f6c-t5qjr\" (UID: \"3aee18f8-337b-4dfc-9951-c44ea52f5193\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.573560 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.618975 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kg5s\" (UniqueName: \"kubernetes.io/projected/852b81d6-0da5-4035-841b-2613bd3f2561-kube-api-access-6kg5s\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.619058 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.619140 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5447d1b-a776-4ddb-a90a-e926273205f3-metallb-excludel2\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.619167 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxsfp\" (UniqueName: \"kubernetes.io/projected/d5447d1b-a776-4ddb-a90a-e926273205f3-kube-api-access-bxsfp\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.619192 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-metrics-certs\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.619214 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/852b81d6-0da5-4035-841b-2613bd3f2561-metrics-certs\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.619237 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/852b81d6-0da5-4035-841b-2613bd3f2561-cert\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.619601 4721 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 30 21:41:58 crc kubenswrapper[4721]: E0130 21:41:58.619663 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist podName:d5447d1b-a776-4ddb-a90a-e926273205f3 nodeName:}" failed. No retries permitted until 2026-01-30 21:41:59.119645176 +0000 UTC m=+1507.911546422 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist") pod "speaker-flglj" (UID: "d5447d1b-a776-4ddb-a90a-e926273205f3") : secret "metallb-memberlist" not found Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.620405 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5447d1b-a776-4ddb-a90a-e926273205f3-metallb-excludel2\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.630089 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/852b81d6-0da5-4035-841b-2613bd3f2561-metrics-certs\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.630110 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-metrics-certs\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.630487 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.655935 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kg5s\" (UniqueName: \"kubernetes.io/projected/852b81d6-0da5-4035-841b-2613bd3f2561-kube-api-access-6kg5s\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.656627 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/852b81d6-0da5-4035-841b-2613bd3f2561-cert\") pod \"controller-6968d8fdc4-4xz99\" (UID: \"852b81d6-0da5-4035-841b-2613bd3f2561\") " pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.662075 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxsfp\" (UniqueName: \"kubernetes.io/projected/d5447d1b-a776-4ddb-a90a-e926273205f3-kube-api-access-bxsfp\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:58 crc kubenswrapper[4721]: I0130 21:41:58.676041 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.026204 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics-certs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.034479 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/501b07ab-d449-4910-a0f2-e37dcef83f0b-metrics-certs\") pod \"frr-k8s-thrjz\" (UID: \"501b07ab-d449-4910-a0f2-e37dcef83f0b\") " pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.082134 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr"] Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.133218 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:41:59 crc kubenswrapper[4721]: E0130 21:41:59.133515 4721 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 30 21:41:59 crc kubenswrapper[4721]: E0130 21:41:59.133657 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist podName:d5447d1b-a776-4ddb-a90a-e926273205f3 nodeName:}" failed. No retries permitted until 2026-01-30 21:42:00.133619849 +0000 UTC m=+1508.925521115 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist") pod "speaker-flglj" (UID: "d5447d1b-a776-4ddb-a90a-e926273205f3") : secret "metallb-memberlist" not found Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.144816 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-4xz99"] Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.152493 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-thrjz" Jan 30 21:41:59 crc kubenswrapper[4721]: W0130 21:41:59.158902 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod852b81d6_0da5_4035_841b_2613bd3f2561.slice/crio-3f9c1bc97a6472f708e14b55a1efd80c2136060aadec25a91649b87c2f7be14e WatchSource:0}: Error finding container 3f9c1bc97a6472f708e14b55a1efd80c2136060aadec25a91649b87c2f7be14e: Status 404 returned error can't find the container with id 3f9c1bc97a6472f708e14b55a1efd80c2136060aadec25a91649b87c2f7be14e Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.284952 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" event={"ID":"3aee18f8-337b-4dfc-9951-c44ea52f5193","Type":"ContainerStarted","Data":"a9df06e3a8ab3c1d1849cc1b31ef564093ef356290d619893acc0dd28f458ea1"} Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.289544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-4xz99" event={"ID":"852b81d6-0da5-4035-841b-2613bd3f2561","Type":"ContainerStarted","Data":"3f9c1bc97a6472f708e14b55a1efd80c2136060aadec25a91649b87c2f7be14e"} Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.448677 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.448781 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.448899 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.449829 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:41:59 crc kubenswrapper[4721]: I0130 21:41:59.449900 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" gracePeriod=600 Jan 30 21:41:59 crc kubenswrapper[4721]: E0130 21:41:59.579384 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:42:00 crc 
kubenswrapper[4721]: I0130 21:42:00.151061 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.176215 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5447d1b-a776-4ddb-a90a-e926273205f3-memberlist\") pod \"speaker-flglj\" (UID: \"d5447d1b-a776-4ddb-a90a-e926273205f3\") " pod="metallb-system/speaker-flglj" Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.314258 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"d671bbe5f891b029ed9f171fd7f1304cad8b540da2b7293ab6beb98455664d88"} Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.340599 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" exitCode=0 Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.340829 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"} Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.340883 4721 scope.go:117] "RemoveContainer" containerID="42fade44689770dc48c8ac1fb73d5f9b5b655130e03ffb51c7329e0d230e8309" Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.341693 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:42:00 crc kubenswrapper[4721]: E0130 21:42:00.342094 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.358379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-4xz99" event={"ID":"852b81d6-0da5-4035-841b-2613bd3f2561","Type":"ContainerStarted","Data":"9572eed5dd65c9f604cc7a9147f097174c46f441b36788b5c80a3050a119c633"} Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.358447 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-4xz99" event={"ID":"852b81d6-0da5-4035-841b-2613bd3f2561","Type":"ContainerStarted","Data":"3bfb203e3f2ed063bc0b4ba7a0941224eee5a63a2705e42eedd54b829bf3b7d6"} Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.359125 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.443174 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-flglj" Jan 30 21:42:00 crc kubenswrapper[4721]: I0130 21:42:00.469755 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-4xz99" podStartSLOduration=2.4697114989999998 podStartE2EDuration="2.469711499s" podCreationTimestamp="2026-01-30 21:41:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:42:00.41289615 +0000 UTC m=+1509.204797396" watchObservedRunningTime="2026-01-30 21:42:00.469711499 +0000 UTC m=+1509.261612745" Jan 30 21:42:01 crc kubenswrapper[4721]: I0130 21:42:01.384099 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-flglj" event={"ID":"d5447d1b-a776-4ddb-a90a-e926273205f3","Type":"ContainerStarted","Data":"0a2e545d2f74c6fbb60e5d927f087aad9d4bc2aa420f302d134fd98bfbce6eee"} Jan 30 21:42:01 crc kubenswrapper[4721]: I0130 21:42:01.384529 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-flglj" event={"ID":"d5447d1b-a776-4ddb-a90a-e926273205f3","Type":"ContainerStarted","Data":"293e24e17255c8a55562546ef34d76e5d4525b44b6fa2c59611541d72a1a6f77"} Jan 30 21:42:01 crc kubenswrapper[4721]: I0130 21:42:01.384552 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-flglj" event={"ID":"d5447d1b-a776-4ddb-a90a-e926273205f3","Type":"ContainerStarted","Data":"d0e7472ccc5c8742db2696417476d7eae3d17e42b211b8bf3b378758a711a0a3"} Jan 30 21:42:01 crc kubenswrapper[4721]: I0130 21:42:01.384760 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-flglj" Jan 30 21:42:01 crc kubenswrapper[4721]: I0130 21:42:01.432740 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-flglj" podStartSLOduration=3.432715805 podStartE2EDuration="3.432715805s" podCreationTimestamp="2026-01-30 21:41:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:42:01.431757625 +0000 UTC m=+1510.223658871" watchObservedRunningTime="2026-01-30 21:42:01.432715805 +0000 UTC m=+1510.224617051" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.824728 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bmjzp"] Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.827720 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.831813 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bmjzp"] Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.893084 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-catalog-content\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.893151 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz8gg\" (UniqueName: \"kubernetes.io/projected/67eda167-420a-498a-bfd9-f6e364e550c2-kube-api-access-kz8gg\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.893190 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-utilities\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.994505 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-catalog-content\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.994570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz8gg\" (UniqueName: \"kubernetes.io/projected/67eda167-420a-498a-bfd9-f6e364e550c2-kube-api-access-kz8gg\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.994606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-utilities\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.995317 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-catalog-content\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:06 crc kubenswrapper[4721]: I0130 21:42:06.995345 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-utilities\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:07 crc kubenswrapper[4721]: I0130 21:42:07.047629 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kz8gg\" (UniqueName: \"kubernetes.io/projected/67eda167-420a-498a-bfd9-f6e364e550c2-kube-api-access-kz8gg\") pod \"community-operators-bmjzp\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:07 crc kubenswrapper[4721]: I0130 21:42:07.159875 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:07 crc kubenswrapper[4721]: I0130 21:42:07.755676 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bmjzp"] Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.454591 4721 generic.go:334] "Generic (PLEG): container finished" podID="501b07ab-d449-4910-a0f2-e37dcef83f0b" containerID="0ffccb88012b39a182d9eb2222df0fd2dab27399ea73bfd985a057865c2f0587" exitCode=0 Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.455031 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerDied","Data":"0ffccb88012b39a182d9eb2222df0fd2dab27399ea73bfd985a057865c2f0587"} Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.459466 4721 generic.go:334] "Generic (PLEG): container finished" podID="67eda167-420a-498a-bfd9-f6e364e550c2" containerID="f2c27221e0a851010b7c9830fd31238dc07f7e24863f74c6e6bc411a8aad936d" exitCode=0 Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.459578 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmjzp" event={"ID":"67eda167-420a-498a-bfd9-f6e364e550c2","Type":"ContainerDied","Data":"f2c27221e0a851010b7c9830fd31238dc07f7e24863f74c6e6bc411a8aad936d"} Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.459627 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmjzp" event={"ID":"67eda167-420a-498a-bfd9-f6e364e550c2","Type":"ContainerStarted","Data":"50c9ccb4b4b3d31cd405a974a9c4c61dbb957ca10bec64ba8be42e97943b8255"} Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.462351 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" event={"ID":"3aee18f8-337b-4dfc-9951-c44ea52f5193","Type":"ContainerStarted","Data":"f71f9d2afe08f97a8846950742ae9daabf0fbaf921c1d1c1f10dd626b0cfb320"} Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.462742 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:42:08 crc kubenswrapper[4721]: I0130 21:42:08.529591 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" podStartSLOduration=2.359207131 podStartE2EDuration="10.529556836s" podCreationTimestamp="2026-01-30 21:41:58 +0000 UTC" firstStartedPulling="2026-01-30 21:41:59.094668164 +0000 UTC m=+1507.886569420" lastFinishedPulling="2026-01-30 21:42:07.265017879 +0000 UTC m=+1516.056919125" observedRunningTime="2026-01-30 21:42:08.518836775 +0000 UTC m=+1517.310738061" watchObservedRunningTime="2026-01-30 21:42:08.529556836 +0000 UTC m=+1517.321458122" Jan 30 21:42:09 crc kubenswrapper[4721]: I0130 21:42:09.486714 4721 generic.go:334] "Generic (PLEG): container finished" podID="501b07ab-d449-4910-a0f2-e37dcef83f0b" containerID="b297019fdbbe03f7e2d7334dee6f83d652ce698642a0f301e3127552e6718226" exitCode=0 
Jan 30 21:42:09 crc kubenswrapper[4721]: I0130 21:42:09.486862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerDied","Data":"b297019fdbbe03f7e2d7334dee6f83d652ce698642a0f301e3127552e6718226"} Jan 30 21:42:10 crc kubenswrapper[4721]: I0130 21:42:10.451082 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-flglj" Jan 30 21:42:10 crc kubenswrapper[4721]: I0130 21:42:10.506666 4721 generic.go:334] "Generic (PLEG): container finished" podID="501b07ab-d449-4910-a0f2-e37dcef83f0b" containerID="aa38e50c7808f325db4c35f9072dfebc10677f29156c7cdcbbdad4998171b55c" exitCode=0 Jan 30 21:42:10 crc kubenswrapper[4721]: I0130 21:42:10.506757 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerDied","Data":"aa38e50c7808f325db4c35f9072dfebc10677f29156c7cdcbbdad4998171b55c"} Jan 30 21:42:10 crc kubenswrapper[4721]: I0130 21:42:10.511602 4721 generic.go:334] "Generic (PLEG): container finished" podID="67eda167-420a-498a-bfd9-f6e364e550c2" containerID="589fb4b004c70b768118ef2fa415a0269dcae437f0b4ce006fea20f2368bba6e" exitCode=0 Jan 30 21:42:10 crc kubenswrapper[4721]: I0130 21:42:10.511646 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmjzp" event={"ID":"67eda167-420a-498a-bfd9-f6e364e550c2","Type":"ContainerDied","Data":"589fb4b004c70b768118ef2fa415a0269dcae437f0b4ce006fea20f2368bba6e"} Jan 30 21:42:11 crc kubenswrapper[4721]: I0130 21:42:11.521241 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmjzp" event={"ID":"67eda167-420a-498a-bfd9-f6e364e550c2","Type":"ContainerStarted","Data":"91ca2ea488776b4f30efae7de9db802c4f86e135c736589b8190b8e7e5b2f2ed"} Jan 30 21:42:11 crc kubenswrapper[4721]: I0130 21:42:11.525050 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"2c330ca8f2d8093a6447d3cbc088ec638b7b42b96003ecc0792a5b88d300220f"} Jan 30 21:42:11 crc kubenswrapper[4721]: I0130 21:42:11.525114 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"2c9657723a9c57d5c3916c0c761ae12ff2b168883bbc498471c0a604fc191ba7"} Jan 30 21:42:11 crc kubenswrapper[4721]: I0130 21:42:11.525145 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"9a70b86fed674acda288719ffc577c19fbe19bacbe16077abc8e6c600f883af1"} Jan 30 21:42:11 crc kubenswrapper[4721]: I0130 21:42:11.525156 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"7e62b753a629e643b3a7fa2529d5068f4f742999050a770ff4ea3d7856560ba5"} Jan 30 21:42:11 crc kubenswrapper[4721]: I0130 21:42:11.548403 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bmjzp" podStartSLOduration=2.908205421 podStartE2EDuration="5.54837254s" podCreationTimestamp="2026-01-30 21:42:06 +0000 UTC" firstStartedPulling="2026-01-30 21:42:08.46183204 +0000 UTC m=+1517.253733306" 
lastFinishedPulling="2026-01-30 21:42:11.101999179 +0000 UTC m=+1519.893900425" observedRunningTime="2026-01-30 21:42:11.543233171 +0000 UTC m=+1520.335134417" watchObservedRunningTime="2026-01-30 21:42:11.54837254 +0000 UTC m=+1520.340273926" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.539880 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"a3db23c07d27ac2548d17341f1a74c85f921f9c4c46fa4441e79e606629b4b7e"} Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.539971 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thrjz" event={"ID":"501b07ab-d449-4910-a0f2-e37dcef83f0b","Type":"ContainerStarted","Data":"74d07be6514ffa684a6652e10807a48d4ac0baae96e9f5c715345562df127321"} Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.587070 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-thrjz" podStartSLOduration=6.567855678 podStartE2EDuration="14.587026946s" podCreationTimestamp="2026-01-30 21:41:58 +0000 UTC" firstStartedPulling="2026-01-30 21:41:59.307766907 +0000 UTC m=+1508.099668153" lastFinishedPulling="2026-01-30 21:42:07.326938175 +0000 UTC m=+1516.118839421" observedRunningTime="2026-01-30 21:42:12.57548336 +0000 UTC m=+1521.367384626" watchObservedRunningTime="2026-01-30 21:42:12.587026946 +0000 UTC m=+1521.378928222" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.805074 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j8qw6"] Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.807060 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.822845 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlpzq\" (UniqueName: \"kubernetes.io/projected/27ff43e9-8b2c-4781-a562-0ed695a73157-kube-api-access-qlpzq\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.823008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-catalog-content\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.823185 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-utilities\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.828401 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j8qw6"] Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.924937 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlpzq\" (UniqueName: \"kubernetes.io/projected/27ff43e9-8b2c-4781-a562-0ed695a73157-kube-api-access-qlpzq\") pod 
\"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.925133 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-catalog-content\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.925354 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-utilities\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.925960 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-utilities\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.926233 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-catalog-content\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:12 crc kubenswrapper[4721]: I0130 21:42:12.957667 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlpzq\" (UniqueName: \"kubernetes.io/projected/27ff43e9-8b2c-4781-a562-0ed695a73157-kube-api-access-qlpzq\") pod \"certified-operators-j8qw6\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:13 crc kubenswrapper[4721]: I0130 21:42:13.142341 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:13 crc kubenswrapper[4721]: I0130 21:42:13.547403 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-thrjz" Jan 30 21:42:13 crc kubenswrapper[4721]: I0130 21:42:13.669738 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j8qw6"] Jan 30 21:42:14 crc kubenswrapper[4721]: I0130 21:42:14.152914 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-thrjz" Jan 30 21:42:14 crc kubenswrapper[4721]: I0130 21:42:14.197118 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-thrjz" Jan 30 21:42:14 crc kubenswrapper[4721]: I0130 21:42:14.559590 4721 generic.go:334] "Generic (PLEG): container finished" podID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerID="f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6" exitCode=0 Jan 30 21:42:14 crc kubenswrapper[4721]: I0130 21:42:14.559688 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerDied","Data":"f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6"} Jan 30 21:42:14 crc kubenswrapper[4721]: I0130 21:42:14.560221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerStarted","Data":"ac0cd18a7b75bb53f9db0a02e1dd4a1b940d495dfbd2a1354308c91c8fb492f2"} Jan 30 21:42:15 crc kubenswrapper[4721]: I0130 21:42:15.093866 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:42:15 crc kubenswrapper[4721]: E0130 21:42:15.094176 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:42:15 crc kubenswrapper[4721]: I0130 21:42:15.572122 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerStarted","Data":"2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4"} Jan 30 21:42:16 crc kubenswrapper[4721]: I0130 21:42:16.584134 4721 generic.go:334] "Generic (PLEG): container finished" podID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerID="2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4" exitCode=0 Jan 30 21:42:16 crc kubenswrapper[4721]: I0130 21:42:16.584229 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerDied","Data":"2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4"} Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.161914 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.162974 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.223592 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.406878 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-hvf87"] Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.408737 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.410864 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.412317 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-gwhb8" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.412874 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.415372 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-hvf87"] Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.518640 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhh4w\" (UniqueName: \"kubernetes.io/projected/c9bf578a-adbc-4168-a09b-edf084023bfa-kube-api-access-hhh4w\") pod \"openstack-operator-index-hvf87\" (UID: \"c9bf578a-adbc-4168-a09b-edf084023bfa\") " pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.596766 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerStarted","Data":"a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d"} Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.621250 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhh4w\" (UniqueName: \"kubernetes.io/projected/c9bf578a-adbc-4168-a09b-edf084023bfa-kube-api-access-hhh4w\") pod \"openstack-operator-index-hvf87\" (UID: \"c9bf578a-adbc-4168-a09b-edf084023bfa\") " pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.632123 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j8qw6" podStartSLOduration=3.189948663 podStartE2EDuration="5.632077894s" podCreationTimestamp="2026-01-30 21:42:12 +0000 UTC" firstStartedPulling="2026-01-30 21:42:14.562125728 +0000 UTC m=+1523.354026984" lastFinishedPulling="2026-01-30 21:42:17.004254959 +0000 UTC m=+1525.796156215" observedRunningTime="2026-01-30 21:42:17.627998738 +0000 UTC m=+1526.419899994" watchObservedRunningTime="2026-01-30 21:42:17.632077894 +0000 UTC m=+1526.423979150" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.662389 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhh4w\" (UniqueName: \"kubernetes.io/projected/c9bf578a-adbc-4168-a09b-edf084023bfa-kube-api-access-hhh4w\") pod \"openstack-operator-index-hvf87\" (UID: \"c9bf578a-adbc-4168-a09b-edf084023bfa\") " 
pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.665146 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:17 crc kubenswrapper[4721]: I0130 21:42:17.728633 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:18 crc kubenswrapper[4721]: I0130 21:42:18.268612 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-hvf87"] Jan 30 21:42:18 crc kubenswrapper[4721]: I0130 21:42:18.579548 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-t5qjr" Jan 30 21:42:18 crc kubenswrapper[4721]: I0130 21:42:18.609278 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hvf87" event={"ID":"c9bf578a-adbc-4168-a09b-edf084023bfa","Type":"ContainerStarted","Data":"0a8708bdd7e841f1a3c79e6e97ea654895dd620fb67e0e2d732f3d3ab7175e15"} Jan 30 21:42:18 crc kubenswrapper[4721]: I0130 21:42:18.681363 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-4xz99" Jan 30 21:42:22 crc kubenswrapper[4721]: I0130 21:42:22.392243 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bmjzp"] Jan 30 21:42:22 crc kubenswrapper[4721]: I0130 21:42:22.393884 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bmjzp" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="registry-server" containerID="cri-o://91ca2ea488776b4f30efae7de9db802c4f86e135c736589b8190b8e7e5b2f2ed" gracePeriod=2 Jan 30 21:42:22 crc kubenswrapper[4721]: I0130 21:42:22.657317 4721 generic.go:334] "Generic (PLEG): container finished" podID="67eda167-420a-498a-bfd9-f6e364e550c2" containerID="91ca2ea488776b4f30efae7de9db802c4f86e135c736589b8190b8e7e5b2f2ed" exitCode=0 Jan 30 21:42:22 crc kubenswrapper[4721]: I0130 21:42:22.657387 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmjzp" event={"ID":"67eda167-420a-498a-bfd9-f6e364e550c2","Type":"ContainerDied","Data":"91ca2ea488776b4f30efae7de9db802c4f86e135c736589b8190b8e7e5b2f2ed"} Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.143485 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.144372 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.210882 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.565793 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.669774 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmjzp" event={"ID":"67eda167-420a-498a-bfd9-f6e364e550c2","Type":"ContainerDied","Data":"50c9ccb4b4b3d31cd405a974a9c4c61dbb957ca10bec64ba8be42e97943b8255"} Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.669890 4721 scope.go:117] "RemoveContainer" containerID="91ca2ea488776b4f30efae7de9db802c4f86e135c736589b8190b8e7e5b2f2ed" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.670353 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bmjzp" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.674340 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hvf87" event={"ID":"c9bf578a-adbc-4168-a09b-edf084023bfa","Type":"ContainerStarted","Data":"e59257d8da2b5c4f1c82cc693e41c73fa75f7084e422e62d904ade610e2f682c"} Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.697579 4721 scope.go:117] "RemoveContainer" containerID="589fb4b004c70b768118ef2fa415a0269dcae437f0b4ce006fea20f2368bba6e" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.707766 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-hvf87" podStartSLOduration=1.632413295 podStartE2EDuration="6.707728408s" podCreationTimestamp="2026-01-30 21:42:17 +0000 UTC" firstStartedPulling="2026-01-30 21:42:18.274966216 +0000 UTC m=+1527.066867502" lastFinishedPulling="2026-01-30 21:42:23.350281369 +0000 UTC m=+1532.142182615" observedRunningTime="2026-01-30 21:42:23.697142631 +0000 UTC m=+1532.489043917" watchObservedRunningTime="2026-01-30 21:42:23.707728408 +0000 UTC m=+1532.499629684" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.731468 4721 scope.go:117] "RemoveContainer" containerID="f2c27221e0a851010b7c9830fd31238dc07f7e24863f74c6e6bc411a8aad936d" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.733030 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.744558 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz8gg\" (UniqueName: \"kubernetes.io/projected/67eda167-420a-498a-bfd9-f6e364e550c2-kube-api-access-kz8gg\") pod \"67eda167-420a-498a-bfd9-f6e364e550c2\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.744755 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-catalog-content\") pod \"67eda167-420a-498a-bfd9-f6e364e550c2\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.744848 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-utilities\") pod \"67eda167-420a-498a-bfd9-f6e364e550c2\" (UID: \"67eda167-420a-498a-bfd9-f6e364e550c2\") " Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.745970 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-utilities" (OuterVolumeSpecName: "utilities") pod "67eda167-420a-498a-bfd9-f6e364e550c2" (UID: "67eda167-420a-498a-bfd9-f6e364e550c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.758389 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67eda167-420a-498a-bfd9-f6e364e550c2-kube-api-access-kz8gg" (OuterVolumeSpecName: "kube-api-access-kz8gg") pod "67eda167-420a-498a-bfd9-f6e364e550c2" (UID: "67eda167-420a-498a-bfd9-f6e364e550c2"). InnerVolumeSpecName "kube-api-access-kz8gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.840770 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67eda167-420a-498a-bfd9-f6e364e550c2" (UID: "67eda167-420a-498a-bfd9-f6e364e550c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.847781 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz8gg\" (UniqueName: \"kubernetes.io/projected/67eda167-420a-498a-bfd9-f6e364e550c2-kube-api-access-kz8gg\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.847820 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:23 crc kubenswrapper[4721]: I0130 21:42:23.847835 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67eda167-420a-498a-bfd9-f6e364e550c2-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:24 crc kubenswrapper[4721]: I0130 21:42:24.023971 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bmjzp"] Jan 30 21:42:24 crc kubenswrapper[4721]: I0130 21:42:24.034037 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bmjzp"] Jan 30 21:42:24 crc kubenswrapper[4721]: I0130 21:42:24.116835 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" path="/var/lib/kubelet/pods/67eda167-420a-498a-bfd9-f6e364e550c2/volumes" Jan 30 21:42:24 crc kubenswrapper[4721]: I0130 21:42:24.995874 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j8qw6"] Jan 30 21:42:26 crc kubenswrapper[4721]: I0130 21:42:26.092381 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:42:26 crc kubenswrapper[4721]: E0130 21:42:26.093157 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:42:26 crc kubenswrapper[4721]: I0130 21:42:26.704773 4721 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/certified-operators-j8qw6" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="registry-server" containerID="cri-o://a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d" gracePeriod=2 Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.670495 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.715236 4721 generic.go:334] "Generic (PLEG): container finished" podID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerID="a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d" exitCode=0 Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.715284 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerDied","Data":"a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d"} Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.715350 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j8qw6" event={"ID":"27ff43e9-8b2c-4781-a562-0ed695a73157","Type":"ContainerDied","Data":"ac0cd18a7b75bb53f9db0a02e1dd4a1b940d495dfbd2a1354308c91c8fb492f2"} Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.715376 4721 scope.go:117] "RemoveContainer" containerID="a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.715492 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j8qw6" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.728805 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.730539 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.736834 4721 scope.go:117] "RemoveContainer" containerID="2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.767645 4721 scope.go:117] "RemoveContainer" containerID="f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.768593 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.791717 4721 scope.go:117] "RemoveContainer" containerID="a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d" Jan 30 21:42:27 crc kubenswrapper[4721]: E0130 21:42:27.792419 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d\": container with ID starting with a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d not found: ID does not exist" containerID="a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.792454 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d"} 
err="failed to get container status \"a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d\": rpc error: code = NotFound desc = could not find container \"a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d\": container with ID starting with a15194db9ae31ce9a1478388cbf174c86012b5f3b4c8dabfa81d803facfb436d not found: ID does not exist" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.792485 4721 scope.go:117] "RemoveContainer" containerID="2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4" Jan 30 21:42:27 crc kubenswrapper[4721]: E0130 21:42:27.793009 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4\": container with ID starting with 2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4 not found: ID does not exist" containerID="2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.793067 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4"} err="failed to get container status \"2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4\": rpc error: code = NotFound desc = could not find container \"2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4\": container with ID starting with 2cd307c76a58cf1dee29a11f0dc9e6fc325afebaab895e017128cae63c1cb1c4 not found: ID does not exist" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.793104 4721 scope.go:117] "RemoveContainer" containerID="f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6" Jan 30 21:42:27 crc kubenswrapper[4721]: E0130 21:42:27.793595 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6\": container with ID starting with f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6 not found: ID does not exist" containerID="f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.793636 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6"} err="failed to get container status \"f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6\": rpc error: code = NotFound desc = could not find container \"f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6\": container with ID starting with f9696418c44c32dce9d6cb4776746f405ae072ae16212ad4f9a3f244c3d4d2a6 not found: ID does not exist" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.815747 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-catalog-content\") pod \"27ff43e9-8b2c-4781-a562-0ed695a73157\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.815881 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlpzq\" (UniqueName: \"kubernetes.io/projected/27ff43e9-8b2c-4781-a562-0ed695a73157-kube-api-access-qlpzq\") pod \"27ff43e9-8b2c-4781-a562-0ed695a73157\" (UID: 
\"27ff43e9-8b2c-4781-a562-0ed695a73157\") " Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.816572 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-utilities\") pod \"27ff43e9-8b2c-4781-a562-0ed695a73157\" (UID: \"27ff43e9-8b2c-4781-a562-0ed695a73157\") " Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.817426 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-utilities" (OuterVolumeSpecName: "utilities") pod "27ff43e9-8b2c-4781-a562-0ed695a73157" (UID: "27ff43e9-8b2c-4781-a562-0ed695a73157"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.823776 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ff43e9-8b2c-4781-a562-0ed695a73157-kube-api-access-qlpzq" (OuterVolumeSpecName: "kube-api-access-qlpzq") pod "27ff43e9-8b2c-4781-a562-0ed695a73157" (UID: "27ff43e9-8b2c-4781-a562-0ed695a73157"). InnerVolumeSpecName "kube-api-access-qlpzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.868592 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27ff43e9-8b2c-4781-a562-0ed695a73157" (UID: "27ff43e9-8b2c-4781-a562-0ed695a73157"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.921214 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.921270 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlpzq\" (UniqueName: \"kubernetes.io/projected/27ff43e9-8b2c-4781-a562-0ed695a73157-kube-api-access-qlpzq\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:27 crc kubenswrapper[4721]: I0130 21:42:27.921287 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ff43e9-8b2c-4781-a562-0ed695a73157-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:28 crc kubenswrapper[4721]: I0130 21:42:28.071524 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j8qw6"] Jan 30 21:42:28 crc kubenswrapper[4721]: I0130 21:42:28.079622 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j8qw6"] Jan 30 21:42:28 crc kubenswrapper[4721]: I0130 21:42:28.102036 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" path="/var/lib/kubelet/pods/27ff43e9-8b2c-4781-a562-0ed695a73157/volumes" Jan 30 21:42:28 crc kubenswrapper[4721]: I0130 21:42:28.758665 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-hvf87" Jan 30 21:42:29 crc kubenswrapper[4721]: I0130 21:42:29.156000 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-thrjz" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 
21:42:31.654776 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth"] Jan 30 21:42:31 crc kubenswrapper[4721]: E0130 21:42:31.656562 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="registry-server" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.656693 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="registry-server" Jan 30 21:42:31 crc kubenswrapper[4721]: E0130 21:42:31.656769 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="extract-utilities" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.656840 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="extract-utilities" Jan 30 21:42:31 crc kubenswrapper[4721]: E0130 21:42:31.656918 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="extract-utilities" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.657006 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="extract-utilities" Jan 30 21:42:31 crc kubenswrapper[4721]: E0130 21:42:31.657095 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="extract-content" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.657176 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="extract-content" Jan 30 21:42:31 crc kubenswrapper[4721]: E0130 21:42:31.657265 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="extract-content" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.657369 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="extract-content" Jan 30 21:42:31 crc kubenswrapper[4721]: E0130 21:42:31.657447 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="registry-server" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.657512 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="registry-server" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.657767 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="67eda167-420a-498a-bfd9-f6e364e550c2" containerName="registry-server" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.657853 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ff43e9-8b2c-4781-a562-0ed695a73157" containerName="registry-server" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.659395 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.661924 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-4r7vp" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.673088 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth"] Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.816504 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-bundle\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.817353 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz8wk\" (UniqueName: \"kubernetes.io/projected/12495753-1318-435a-b2c4-33b9f35ba86b-kube-api-access-jz8wk\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.817469 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-util\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.919438 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz8wk\" (UniqueName: \"kubernetes.io/projected/12495753-1318-435a-b2c4-33b9f35ba86b-kube-api-access-jz8wk\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.919543 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-util\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.919631 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-bundle\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.920491 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-bundle\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.921440 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-util\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:31 crc kubenswrapper[4721]: I0130 21:42:31.958008 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz8wk\" (UniqueName: \"kubernetes.io/projected/12495753-1318-435a-b2c4-33b9f35ba86b-kube-api-access-jz8wk\") pod \"a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:32 crc kubenswrapper[4721]: I0130 21:42:32.002840 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:32 crc kubenswrapper[4721]: I0130 21:42:32.305842 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth"] Jan 30 21:42:32 crc kubenswrapper[4721]: I0130 21:42:32.764492 4721 generic.go:334] "Generic (PLEG): container finished" podID="12495753-1318-435a-b2c4-33b9f35ba86b" containerID="f4351bd8be83ad4a2b233923c2e4b02a0e5e8158f5609a15910713f700ee9b21" exitCode=0 Jan 30 21:42:32 crc kubenswrapper[4721]: I0130 21:42:32.764551 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" event={"ID":"12495753-1318-435a-b2c4-33b9f35ba86b","Type":"ContainerDied","Data":"f4351bd8be83ad4a2b233923c2e4b02a0e5e8158f5609a15910713f700ee9b21"} Jan 30 21:42:32 crc kubenswrapper[4721]: I0130 21:42:32.764586 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" event={"ID":"12495753-1318-435a-b2c4-33b9f35ba86b","Type":"ContainerStarted","Data":"59b4c25b937be30f8a2711ab81c4e844d83c88854fff039eb69aaa4dadedc2cd"} Jan 30 21:42:33 crc kubenswrapper[4721]: I0130 21:42:33.773987 4721 generic.go:334] "Generic (PLEG): container finished" podID="12495753-1318-435a-b2c4-33b9f35ba86b" containerID="f34de308f0ffee0c2a960e5d09ca603a3ae5bab1ed0585ba9af6f4881716cdcc" exitCode=0 Jan 30 21:42:33 crc kubenswrapper[4721]: I0130 21:42:33.774507 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" event={"ID":"12495753-1318-435a-b2c4-33b9f35ba86b","Type":"ContainerDied","Data":"f34de308f0ffee0c2a960e5d09ca603a3ae5bab1ed0585ba9af6f4881716cdcc"} Jan 30 21:42:34 crc kubenswrapper[4721]: I0130 21:42:34.788121 4721 generic.go:334] "Generic (PLEG): container finished" podID="12495753-1318-435a-b2c4-33b9f35ba86b" containerID="b85df2f82d4bf16cb4427f8b08e7523f0d1ef57eb91f685ccae45c94322b5635" exitCode=0 Jan 30 21:42:34 crc kubenswrapper[4721]: I0130 21:42:34.788199 4721 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" event={"ID":"12495753-1318-435a-b2c4-33b9f35ba86b","Type":"ContainerDied","Data":"b85df2f82d4bf16cb4427f8b08e7523f0d1ef57eb91f685ccae45c94322b5635"} Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.172506 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.201698 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-bundle\") pod \"12495753-1318-435a-b2c4-33b9f35ba86b\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.201806 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-util\") pod \"12495753-1318-435a-b2c4-33b9f35ba86b\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.205160 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-bundle" (OuterVolumeSpecName: "bundle") pod "12495753-1318-435a-b2c4-33b9f35ba86b" (UID: "12495753-1318-435a-b2c4-33b9f35ba86b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.238489 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-util" (OuterVolumeSpecName: "util") pod "12495753-1318-435a-b2c4-33b9f35ba86b" (UID: "12495753-1318-435a-b2c4-33b9f35ba86b"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.303508 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz8wk\" (UniqueName: \"kubernetes.io/projected/12495753-1318-435a-b2c4-33b9f35ba86b-kube-api-access-jz8wk\") pod \"12495753-1318-435a-b2c4-33b9f35ba86b\" (UID: \"12495753-1318-435a-b2c4-33b9f35ba86b\") " Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.304137 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.304163 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12495753-1318-435a-b2c4-33b9f35ba86b-util\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.311879 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12495753-1318-435a-b2c4-33b9f35ba86b-kube-api-access-jz8wk" (OuterVolumeSpecName: "kube-api-access-jz8wk") pod "12495753-1318-435a-b2c4-33b9f35ba86b" (UID: "12495753-1318-435a-b2c4-33b9f35ba86b"). InnerVolumeSpecName "kube-api-access-jz8wk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.405284 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz8wk\" (UniqueName: \"kubernetes.io/projected/12495753-1318-435a-b2c4-33b9f35ba86b-kube-api-access-jz8wk\") on node \"crc\" DevicePath \"\"" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.814921 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" event={"ID":"12495753-1318-435a-b2c4-33b9f35ba86b","Type":"ContainerDied","Data":"59b4c25b937be30f8a2711ab81c4e844d83c88854fff039eb69aaa4dadedc2cd"} Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.815482 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59b4c25b937be30f8a2711ab81c4e844d83c88854fff039eb69aaa4dadedc2cd" Jan 30 21:42:36 crc kubenswrapper[4721]: I0130 21:42:36.815035 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth" Jan 30 21:42:40 crc kubenswrapper[4721]: I0130 21:42:40.093607 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:42:40 crc kubenswrapper[4721]: E0130 21:42:40.094152 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.773583 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7"] Jan 30 21:42:42 crc kubenswrapper[4721]: E0130 21:42:42.775093 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="pull" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.775225 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="pull" Jan 30 21:42:42 crc kubenswrapper[4721]: E0130 21:42:42.775292 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="extract" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.775367 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="extract" Jan 30 21:42:42 crc kubenswrapper[4721]: E0130 21:42:42.775437 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="util" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.775498 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="util" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.775686 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="12495753-1318-435a-b2c4-33b9f35ba86b" containerName="extract" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.776500 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.793806 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-fbc2b" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.812584 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld4kx\" (UniqueName: \"kubernetes.io/projected/16ce971e-b375-4472-bbf9-6310b8524952-kube-api-access-ld4kx\") pod \"openstack-operator-controller-init-8584c7c99f-224n7\" (UID: \"16ce971e-b375-4472-bbf9-6310b8524952\") " pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.817777 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7"] Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.913650 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld4kx\" (UniqueName: \"kubernetes.io/projected/16ce971e-b375-4472-bbf9-6310b8524952-kube-api-access-ld4kx\") pod \"openstack-operator-controller-init-8584c7c99f-224n7\" (UID: \"16ce971e-b375-4472-bbf9-6310b8524952\") " pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:42:42 crc kubenswrapper[4721]: I0130 21:42:42.951653 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld4kx\" (UniqueName: \"kubernetes.io/projected/16ce971e-b375-4472-bbf9-6310b8524952-kube-api-access-ld4kx\") pod \"openstack-operator-controller-init-8584c7c99f-224n7\" (UID: \"16ce971e-b375-4472-bbf9-6310b8524952\") " pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:42:43 crc kubenswrapper[4721]: I0130 21:42:43.099278 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:42:43 crc kubenswrapper[4721]: I0130 21:42:43.596912 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7"] Jan 30 21:42:43 crc kubenswrapper[4721]: W0130 21:42:43.606599 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16ce971e_b375_4472_bbf9_6310b8524952.slice/crio-e269d964dcd9b937ea17a17b9f0b8627d23996f8eaabba11b29d22ab0567a544 WatchSource:0}: Error finding container e269d964dcd9b937ea17a17b9f0b8627d23996f8eaabba11b29d22ab0567a544: Status 404 returned error can't find the container with id e269d964dcd9b937ea17a17b9f0b8627d23996f8eaabba11b29d22ab0567a544 Jan 30 21:42:43 crc kubenswrapper[4721]: I0130 21:42:43.874593 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" event={"ID":"16ce971e-b375-4472-bbf9-6310b8524952","Type":"ContainerStarted","Data":"e269d964dcd9b937ea17a17b9f0b8627d23996f8eaabba11b29d22ab0567a544"} Jan 30 21:42:48 crc kubenswrapper[4721]: I0130 21:42:48.920927 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" event={"ID":"16ce971e-b375-4472-bbf9-6310b8524952","Type":"ContainerStarted","Data":"d84a5075228975df42138d5d9b05c932624e3b5cdea1a3bfdffee6d84552fede"} Jan 30 21:42:48 crc kubenswrapper[4721]: I0130 21:42:48.922096 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:42:48 crc kubenswrapper[4721]: I0130 21:42:48.967542 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" podStartSLOduration=2.754498591 podStartE2EDuration="6.96751735s" podCreationTimestamp="2026-01-30 21:42:42 +0000 UTC" firstStartedPulling="2026-01-30 21:42:43.611231866 +0000 UTC m=+1552.403133122" lastFinishedPulling="2026-01-30 21:42:47.824250635 +0000 UTC m=+1556.616151881" observedRunningTime="2026-01-30 21:42:48.965511889 +0000 UTC m=+1557.757413185" watchObservedRunningTime="2026-01-30 21:42:48.96751735 +0000 UTC m=+1557.759418606" Jan 30 21:42:51 crc kubenswrapper[4721]: I0130 21:42:51.092055 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:42:51 crc kubenswrapper[4721]: E0130 21:42:51.092373 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:42:53 crc kubenswrapper[4721]: I0130 21:42:53.103190 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-8584c7c99f-224n7" Jan 30 21:43:06 crc kubenswrapper[4721]: I0130 21:43:06.092814 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:43:06 crc kubenswrapper[4721]: E0130 21:43:06.093620 4721 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.063629 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.065992 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.069330 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-26584" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.072432 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.073819 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.076147 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-jkpz9" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.080778 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.107353 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.151776 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.165900 4721 util.go:30] "No sandbox for pod can be found. 
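The 21:43:14 burst of "SyncLoop ADD" events is the freshly-ready controller-init pod creating one Deployment per OpenStack service operator; each resulting pod lands on this node and arrives through the kubelet's source="api" pod source, a watch filtered server-side to pods bound to the node. A minimal sketch of that kind of node-scoped watch with client-go (in-cluster config and the node name "crc" are illustrative assumptions):

```go
package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

// Watch only pods bound to one node: the same server-side filter that feeds
// the kubelet's source="api" ADD/UPDATE events in this log.
func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	w, err := cs.CoreV1().Pods("").Watch(context.Background(),
		metav1.ListOptions{FieldSelector: "spec.nodeName=crc"})
	if err != nil {
		panic(err)
	}
	for ev := range w.ResultChan() {
		if pod, ok := ev.Object.(*corev1.Pod); ok {
			fmt.Printf("%s %s/%s\n", ev.Type, pod.Namespace, pod.Name)
		}
	}
}
```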
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.170741 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-6g9v4" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.171493 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.185666 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8qln\" (UniqueName: \"kubernetes.io/projected/ce8df3e5-ac5d-4782-97fe-b49e9342768a-kube-api-access-c8qln\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-qhpvg\" (UID: \"ce8df3e5-ac5d-4782-97fe-b49e9342768a\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.185837 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl7qb\" (UniqueName: \"kubernetes.io/projected/6dfaa0a8-aa69-4d52-8740-b1098802644c-kube-api-access-sl7qb\") pod \"cinder-operator-controller-manager-8d874c8fc-ddmz2\" (UID: \"6dfaa0a8-aa69-4d52-8740-b1098802644c\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.209365 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.211185 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.214215 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-mkdzm" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.270071 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.273381 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.274574 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.279403 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-m4s9z" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.284079 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.287506 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwh7v\" (UniqueName: \"kubernetes.io/projected/e5aed1e3-eebf-4e1b-ab1b-1b81b337374e-kube-api-access-xwh7v\") pod \"designate-operator-controller-manager-6d9697b7f4-77cms\" (UID: \"e5aed1e3-eebf-4e1b-ab1b-1b81b337374e\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.287631 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl7qb\" (UniqueName: \"kubernetes.io/projected/6dfaa0a8-aa69-4d52-8740-b1098802644c-kube-api-access-sl7qb\") pod \"cinder-operator-controller-manager-8d874c8fc-ddmz2\" (UID: \"6dfaa0a8-aa69-4d52-8740-b1098802644c\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.287670 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8qln\" (UniqueName: \"kubernetes.io/projected/ce8df3e5-ac5d-4782-97fe-b49e9342768a-kube-api-access-c8qln\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-qhpvg\" (UID: \"ce8df3e5-ac5d-4782-97fe-b49e9342768a\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.298097 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-r42gp"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.299411 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.302323 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-snx7s" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.302370 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.326012 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.327122 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.335806 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-8c4sr" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.341648 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-r42gp"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.364383 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.365656 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.373679 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8qln\" (UniqueName: \"kubernetes.io/projected/ce8df3e5-ac5d-4782-97fe-b49e9342768a-kube-api-access-c8qln\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-qhpvg\" (UID: \"ce8df3e5-ac5d-4782-97fe-b49e9342768a\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.373764 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-4zdrn" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.390342 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.391738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbm94\" (UniqueName: \"kubernetes.io/projected/6a148cdc-0a77-4f57-b5e6-1b2acf90a900-kube-api-access-xbm94\") pod \"heat-operator-controller-manager-69d6db494d-fvxk2\" (UID: \"6a148cdc-0a77-4f57-b5e6-1b2acf90a900\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.391775 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrh99\" (UniqueName: \"kubernetes.io/projected/2dbdf4c9-4962-45ea-ac32-adbb848529d7-kube-api-access-xrh99\") pod \"glance-operator-controller-manager-8886f4c47-snvcj\" (UID: \"2dbdf4c9-4962-45ea-ac32-adbb848529d7\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.391810 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwh7v\" (UniqueName: \"kubernetes.io/projected/e5aed1e3-eebf-4e1b-ab1b-1b81b337374e-kube-api-access-xwh7v\") pod \"designate-operator-controller-manager-6d9697b7f4-77cms\" (UID: \"e5aed1e3-eebf-4e1b-ab1b-1b81b337374e\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.392828 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.396068 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl7qb\" (UniqueName: \"kubernetes.io/projected/6dfaa0a8-aa69-4d52-8740-b1098802644c-kube-api-access-sl7qb\") pod \"cinder-operator-controller-manager-8d874c8fc-ddmz2\" (UID: \"6dfaa0a8-aa69-4d52-8740-b1098802644c\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.402633 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.454368 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.455424 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwh7v\" (UniqueName: \"kubernetes.io/projected/e5aed1e3-eebf-4e1b-ab1b-1b81b337374e-kube-api-access-xwh7v\") pod \"designate-operator-controller-manager-6d9697b7f4-77cms\" (UID: \"e5aed1e3-eebf-4e1b-ab1b-1b81b337374e\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.473508 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.474815 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.486987 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-c4v4h" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.487533 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.492717 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6xjd\" (UniqueName: \"kubernetes.io/projected/b10d3fdb-7237-4461-ba03-ed926092791f-kube-api-access-r6xjd\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.492796 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddv6z\" (UniqueName: \"kubernetes.io/projected/782f1962-bc39-4162-84ae-acad49911f45-kube-api-access-ddv6z\") pod \"ironic-operator-controller-manager-5f4b8bd54d-mbz59\" (UID: \"782f1962-bc39-4162-84ae-acad49911f45\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.492821 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djsmv\" (UniqueName: \"kubernetes.io/projected/b6d746e4-3768-42df-956a-c700072e4e4c-kube-api-access-djsmv\") pod \"horizon-operator-controller-manager-5fb775575f-m79fw\" (UID: \"b6d746e4-3768-42df-956a-c700072e4e4c\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.492864 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbm94\" (UniqueName: \"kubernetes.io/projected/6a148cdc-0a77-4f57-b5e6-1b2acf90a900-kube-api-access-xbm94\") pod \"heat-operator-controller-manager-69d6db494d-fvxk2\" (UID: \"6a148cdc-0a77-4f57-b5e6-1b2acf90a900\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.492883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrh99\" (UniqueName: \"kubernetes.io/projected/2dbdf4c9-4962-45ea-ac32-adbb848529d7-kube-api-access-xrh99\") pod \"glance-operator-controller-manager-8886f4c47-snvcj\" (UID: \"2dbdf4c9-4962-45ea-ac32-adbb848529d7\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.492917 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.520490 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrh99\" (UniqueName: \"kubernetes.io/projected/2dbdf4c9-4962-45ea-ac32-adbb848529d7-kube-api-access-xrh99\") pod \"glance-operator-controller-manager-8886f4c47-snvcj\" (UID: \"2dbdf4c9-4962-45ea-ac32-adbb848529d7\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.521256 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj"] Jan 30 21:43:14 
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.530751 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbm94\" (UniqueName: \"kubernetes.io/projected/6a148cdc-0a77-4f57-b5e6-1b2acf90a900-kube-api-access-xbm94\") pod \"heat-operator-controller-manager-69d6db494d-fvxk2\" (UID: \"6a148cdc-0a77-4f57-b5e6-1b2acf90a900\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.530826 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.536276 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-l6qr8"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.537734 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2"]
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.539265 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.540924 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-zm858"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.542835 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj"
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.545176 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.558618 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.600139 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.600936 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddv6z\" (UniqueName: \"kubernetes.io/projected/782f1962-bc39-4162-84ae-acad49911f45-kube-api-access-ddv6z\") pod \"ironic-operator-controller-manager-5f4b8bd54d-mbz59\" (UID: \"782f1962-bc39-4162-84ae-acad49911f45\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.601021 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djsmv\" (UniqueName: \"kubernetes.io/projected/b6d746e4-3768-42df-956a-c700072e4e4c-kube-api-access-djsmv\") pod \"horizon-operator-controller-manager-5fb775575f-m79fw\" (UID: \"b6d746e4-3768-42df-956a-c700072e4e4c\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.601068 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47947\" (UniqueName: \"kubernetes.io/projected/7d5fad49-066d-48d6-a9f0-0c3a105df525-kube-api-access-47947\") pod \"keystone-operator-controller-manager-84f48565d4-jztwj\" (UID: \"7d5fad49-066d-48d6-a9f0-0c3a105df525\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.601122 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.601167 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6xjd\" (UniqueName: \"kubernetes.io/projected/b10d3fdb-7237-4461-ba03-ed926092791f-kube-api-access-r6xjd\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.601408 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" Jan 30 21:43:14 crc kubenswrapper[4721]: E0130 21:43:14.601602 4721 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:14 crc kubenswrapper[4721]: E0130 21:43:14.601675 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert podName:b10d3fdb-7237-4461-ba03-ed926092791f nodeName:}" failed. No retries permitted until 2026-01-30 21:43:15.101650235 +0000 UTC m=+1583.893551481 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert") pod "infra-operator-controller-manager-79955696d6-r42gp" (UID: "b10d3fdb-7237-4461-ba03-ed926092791f") : secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.601857 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.607935 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-mt6rs" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.625907 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6xjd\" (UniqueName: \"kubernetes.io/projected/b10d3fdb-7237-4461-ba03-ed926092791f-kube-api-access-r6xjd\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.626024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddv6z\" (UniqueName: \"kubernetes.io/projected/782f1962-bc39-4162-84ae-acad49911f45-kube-api-access-ddv6z\") pod \"ironic-operator-controller-manager-5f4b8bd54d-mbz59\" (UID: \"782f1962-bc39-4162-84ae-acad49911f45\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.630470 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.631595 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.656133 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-4mxkp" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.676887 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djsmv\" (UniqueName: \"kubernetes.io/projected/b6d746e4-3768-42df-956a-c700072e4e4c-kube-api-access-djsmv\") pod \"horizon-operator-controller-manager-5fb775575f-m79fw\" (UID: \"b6d746e4-3768-42df-956a-c700072e4e4c\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.697851 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.706328 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5d78\" (UniqueName: \"kubernetes.io/projected/3d3b8ade-729b-4dfc-9ae8-ead1999f9657-kube-api-access-k5d78\") pod \"nova-operator-controller-manager-55bff696bd-qhqfv\" (UID: \"3d3b8ade-729b-4dfc-9ae8-ead1999f9657\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.706402 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vns2x\" (UniqueName: \"kubernetes.io/projected/319096ad-d67b-4344-8bb2-290aafd57bc0-kube-api-access-vns2x\") pod \"neutron-operator-controller-manager-585dbc889-j5m4f\" (UID: \"319096ad-d67b-4344-8bb2-290aafd57bc0\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.706435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47947\" (UniqueName: \"kubernetes.io/projected/7d5fad49-066d-48d6-a9f0-0c3a105df525-kube-api-access-47947\") pod \"keystone-operator-controller-manager-84f48565d4-jztwj\" (UID: \"7d5fad49-066d-48d6-a9f0-0c3a105df525\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.706641 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsvxn\" (UniqueName: \"kubernetes.io/projected/e7dd1fd7-f720-45b2-86b4-bc056b1ef360-kube-api-access-zsvxn\") pod \"mariadb-operator-controller-manager-67bf948998-6qxsd\" (UID: \"e7dd1fd7-f720-45b2-86b4-bc056b1ef360\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.706762 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwzsk\" (UniqueName: \"kubernetes.io/projected/64d5e6e7-b654-4060-9ba5-82e52e172a3b-kube-api-access-hwzsk\") pod \"manila-operator-controller-manager-7dd968899f-scnz2\" (UID: \"64d5e6e7-b654-4060-9ba5-82e52e172a3b\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.707040 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv"] Jan 30 21:43:14 crc 
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.737643 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.773243 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth"]
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.774544 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.777663 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-5kdmn"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.809920 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsvxn\" (UniqueName: \"kubernetes.io/projected/e7dd1fd7-f720-45b2-86b4-bc056b1ef360-kube-api-access-zsvxn\") pod \"mariadb-operator-controller-manager-67bf948998-6qxsd\" (UID: \"e7dd1fd7-f720-45b2-86b4-bc056b1ef360\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.809968 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwzsk\" (UniqueName: \"kubernetes.io/projected/64d5e6e7-b654-4060-9ba5-82e52e172a3b-kube-api-access-hwzsk\") pod \"manila-operator-controller-manager-7dd968899f-scnz2\" (UID: \"64d5e6e7-b654-4060-9ba5-82e52e172a3b\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.810046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5d78\" (UniqueName: \"kubernetes.io/projected/3d3b8ade-729b-4dfc-9ae8-ead1999f9657-kube-api-access-k5d78\") pod \"nova-operator-controller-manager-55bff696bd-qhqfv\" (UID: \"3d3b8ade-729b-4dfc-9ae8-ead1999f9657\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.810082 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vns2x\" (UniqueName: \"kubernetes.io/projected/319096ad-d67b-4344-8bb2-290aafd57bc0-kube-api-access-vns2x\") pod \"neutron-operator-controller-manager-585dbc889-j5m4f\" (UID: \"319096ad-d67b-4344-8bb2-290aafd57bc0\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f"
Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.838646 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsvxn\" (UniqueName: \"kubernetes.io/projected/e7dd1fd7-f720-45b2-86b4-bc056b1ef360-kube-api-access-zsvxn\") pod \"mariadb-operator-controller-manager-67bf948998-6qxsd\" (UID: \"e7dd1fd7-f720-45b2-86b4-bc056b1ef360\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd"
pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.854188 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5d78\" (UniqueName: \"kubernetes.io/projected/3d3b8ade-729b-4dfc-9ae8-ead1999f9657-kube-api-access-k5d78\") pod \"nova-operator-controller-manager-55bff696bd-qhqfv\" (UID: \"3d3b8ade-729b-4dfc-9ae8-ead1999f9657\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.855974 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.866702 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwzsk\" (UniqueName: \"kubernetes.io/projected/64d5e6e7-b654-4060-9ba5-82e52e172a3b-kube-api-access-hwzsk\") pod \"manila-operator-controller-manager-7dd968899f-scnz2\" (UID: \"64d5e6e7-b654-4060-9ba5-82e52e172a3b\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.868868 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.881050 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf"] Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.881108 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vns2x\" (UniqueName: \"kubernetes.io/projected/319096ad-d67b-4344-8bb2-290aafd57bc0-kube-api-access-vns2x\") pod \"neutron-operator-controller-manager-585dbc889-j5m4f\" (UID: \"319096ad-d67b-4344-8bb2-290aafd57bc0\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.882148 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.898859 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.899233 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-gtb66" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.909257 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.918830 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfmnt\" (UniqueName: \"kubernetes.io/projected/c53f8f24-7f92-4255-ad09-8a729b4159ab-kube-api-access-dfmnt\") pod \"octavia-operator-controller-manager-6687f8d877-2cmth\" (UID: \"c53f8f24-7f92-4255-ad09-8a729b4159ab\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" Jan 30 21:43:14 crc kubenswrapper[4721]: I0130 21:43:14.919183 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.004057 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.024931 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq5s2\" (UniqueName: \"kubernetes.io/projected/34543b19-ae6c-4a39-ad40-0dff196f0fd6-kube-api-access-pq5s2\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.025034 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.025145 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfmnt\" (UniqueName: \"kubernetes.io/projected/c53f8f24-7f92-4255-ad09-8a729b4159ab-kube-api-access-dfmnt\") pod \"octavia-operator-controller-manager-6687f8d877-2cmth\" (UID: \"c53f8f24-7f92-4255-ad09-8a729b4159ab\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.038113 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.039369 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.040966 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.041010 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.041773 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.043078 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.048119 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-6g6wv" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.048579 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-xs98t" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.049832 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-59sk2" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.068282 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.068941 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.080433 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.138470 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.138610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq5s2\" (UniqueName: \"kubernetes.io/projected/34543b19-ae6c-4a39-ad40-0dff196f0fd6-kube-api-access-pq5s2\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.138649 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:15 crc kubenswrapper[4721]: E0130 21:43:15.138838 4721 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:15 crc kubenswrapper[4721]: E0130 21:43:15.138911 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert podName:34543b19-ae6c-4a39-ad40-0dff196f0fd6 nodeName:}" failed. No retries permitted until 2026-01-30 21:43:15.638887573 +0000 UTC m=+1584.430788819 (durationBeforeRetry 500ms). 
Jan 30 21:43:15 crc kubenswrapper[4721]: E0130 21:43:15.138961 4721 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Jan 30 21:43:15 crc kubenswrapper[4721]: E0130 21:43:15.138984 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert podName:b10d3fdb-7237-4461-ba03-ed926092791f nodeName:}" failed. No retries permitted until 2026-01-30 21:43:16.138977306 +0000 UTC m=+1584.930878552 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert") pod "infra-operator-controller-manager-79955696d6-r42gp" (UID: "b10d3fdb-7237-4461-ba03-ed926092791f") : secret "infra-operator-webhook-server-cert" not found
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.146613 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc"]
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.177998 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfmnt\" (UniqueName: \"kubernetes.io/projected/c53f8f24-7f92-4255-ad09-8a729b4159ab-kube-api-access-dfmnt\") pod \"octavia-operator-controller-manager-6687f8d877-2cmth\" (UID: \"c53f8f24-7f92-4255-ad09-8a729b4159ab\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth"
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.185449 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd"]
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.204138 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq5s2\" (UniqueName: \"kubernetes.io/projected/34543b19-ae6c-4a39-ad40-0dff196f0fd6-kube-api-access-pq5s2\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf"
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.263291 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t92q5\" (UniqueName: \"kubernetes.io/projected/d4433a61-fd64-4240-8a12-8d86a8a52e77-kube-api-access-t92q5\") pod \"placement-operator-controller-manager-5b964cf4cd-kgpcd\" (UID: \"d4433a61-fd64-4240-8a12-8d86a8a52e77\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd"
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.289198 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gkvv\" (UniqueName: \"kubernetes.io/projected/d9650011-7842-4b52-bf3b-728e40294cb4-kube-api-access-8gkvv\") pod \"swift-operator-controller-manager-68fc8c869-6ptfc\" (UID: \"d9650011-7842-4b52-bf3b-728e40294cb4\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc"
Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.290159 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bznsh\" (UniqueName: \"kubernetes.io/projected/5be9ffe8-a1a4-4aa1-a704-5443e1ef640b-kube-api-access-bznsh\") pod \"ovn-operator-controller-manager-788c46999f-x44jj\" (UID: \"5be9ffe8-a1a4-4aa1-a704-5443e1ef640b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj"
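Both failing "cert" volumes point at webhook-server certificate secrets that are only created once the operators' webhooks are reconciled, so the two pods sit in ContainerCreating while the mount retries until the secrets appear; the volumes are required rather than optional, which is what blocks startup. The distinction, sketched with client-go types (the Optional flag shown is generic Kubernetes behaviour, not something stated in this log):

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// A required secret volume (Optional nil/false) produces exactly the
// "MountVolume.SetUp failed ... secret not found" retry loop seen above;
// an optional one would mount empty and let the pod start without it.
func main() {
	optional := false // flip to true to tolerate a missing secret
	vol := corev1.Volume{
		Name: "cert",
		VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{
				SecretName: "infra-operator-webhook-server-cert",
				Optional:   &optional,
			},
		},
	}
	fmt.Printf("volume %q -> secret %q (optional=%v)\n",
		vol.Name, vol.VolumeSource.Secret.SecretName, *vol.VolumeSource.Secret.Optional)
}
```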
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bznsh\" (UniqueName: \"kubernetes.io/projected/5be9ffe8-a1a4-4aa1-a704-5443e1ef640b-kube-api-access-bznsh\") pod \"ovn-operator-controller-manager-788c46999f-x44jj\" (UID: \"5be9ffe8-a1a4-4aa1-a704-5443e1ef640b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.347681 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.348951 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.357176 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-xl5hd" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.361484 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.392871 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bznsh\" (UniqueName: \"kubernetes.io/projected/5be9ffe8-a1a4-4aa1-a704-5443e1ef640b-kube-api-access-bznsh\") pod \"ovn-operator-controller-manager-788c46999f-x44jj\" (UID: \"5be9ffe8-a1a4-4aa1-a704-5443e1ef640b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.392976 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t92q5\" (UniqueName: \"kubernetes.io/projected/d4433a61-fd64-4240-8a12-8d86a8a52e77-kube-api-access-t92q5\") pod \"placement-operator-controller-manager-5b964cf4cd-kgpcd\" (UID: \"d4433a61-fd64-4240-8a12-8d86a8a52e77\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.393017 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvzzm\" (UniqueName: \"kubernetes.io/projected/54f2f57d-0269-4ba8-94f5-04873f29e16c-kube-api-access-nvzzm\") pod \"telemetry-operator-controller-manager-fd77b8dd7-4t9rw\" (UID: \"54f2f57d-0269-4ba8-94f5-04873f29e16c\") " pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.393059 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gkvv\" (UniqueName: \"kubernetes.io/projected/d9650011-7842-4b52-bf3b-728e40294cb4-kube-api-access-8gkvv\") pod \"swift-operator-controller-manager-68fc8c869-6ptfc\" (UID: \"d9650011-7842-4b52-bf3b-728e40294cb4\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.392875 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.429428 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.436453 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bznsh\" (UniqueName: \"kubernetes.io/projected/5be9ffe8-a1a4-4aa1-a704-5443e1ef640b-kube-api-access-bznsh\") pod \"ovn-operator-controller-manager-788c46999f-x44jj\" (UID: \"5be9ffe8-a1a4-4aa1-a704-5443e1ef640b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.458825 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t92q5\" (UniqueName: \"kubernetes.io/projected/d4433a61-fd64-4240-8a12-8d86a8a52e77-kube-api-access-t92q5\") pod \"placement-operator-controller-manager-5b964cf4cd-kgpcd\" (UID: \"d4433a61-fd64-4240-8a12-8d86a8a52e77\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.459478 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gkvv\" (UniqueName: \"kubernetes.io/projected/d9650011-7842-4b52-bf3b-728e40294cb4-kube-api-access-8gkvv\") pod \"swift-operator-controller-manager-68fc8c869-6ptfc\" (UID: \"d9650011-7842-4b52-bf3b-728e40294cb4\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.465842 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.466990 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.489317 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-tkdlk" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.491761 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.493471 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bncps\" (UniqueName: \"kubernetes.io/projected/6e34b8b8-6fc5-4444-b957-b6325671ec2a-kube-api-access-bncps\") pod \"test-operator-controller-manager-56f8bfcd9f-jktzs\" (UID: \"6e34b8b8-6fc5-4444-b957-b6325671ec2a\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.493535 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvzzm\" (UniqueName: \"kubernetes.io/projected/54f2f57d-0269-4ba8-94f5-04873f29e16c-kube-api-access-nvzzm\") pod \"telemetry-operator-controller-manager-fd77b8dd7-4t9rw\" (UID: \"54f2f57d-0269-4ba8-94f5-04873f29e16c\") " pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.560105 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.579561 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvzzm\" (UniqueName: \"kubernetes.io/projected/54f2f57d-0269-4ba8-94f5-04873f29e16c-kube-api-access-nvzzm\") pod \"telemetry-operator-controller-manager-fd77b8dd7-4t9rw\" (UID: \"54f2f57d-0269-4ba8-94f5-04873f29e16c\") " pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.589876 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.593711 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.594494 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bncps\" (UniqueName: \"kubernetes.io/projected/6e34b8b8-6fc5-4444-b957-b6325671ec2a-kube-api-access-bncps\") pod \"test-operator-controller-manager-56f8bfcd9f-jktzs\" (UID: \"6e34b8b8-6fc5-4444-b957-b6325671ec2a\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.637919 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.679765 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bncps\" (UniqueName: \"kubernetes.io/projected/6e34b8b8-6fc5-4444-b957-b6325671ec2a-kube-api-access-bncps\") pod \"test-operator-controller-manager-56f8bfcd9f-jktzs\" (UID: \"6e34b8b8-6fc5-4444-b957-b6325671ec2a\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.698465 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-c2c8k"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.702558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:15 crc kubenswrapper[4721]: E0130 21:43:15.702753 4721 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:15 crc kubenswrapper[4721]: E0130 21:43:15.702836 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert podName:34543b19-ae6c-4a39-ad40-0dff196f0fd6 nodeName:}" failed. No retries permitted until 2026-01-30 21:43:16.702808327 +0000 UTC m=+1585.494709573 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" (UID: "34543b19-ae6c-4a39-ad40-0dff196f0fd6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.706586 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.723352 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-c2c8k"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.724565 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-kws9z" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.755364 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.756882 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.759418 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.764764 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.764834 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-dfhwh" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.771477 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.787734 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.790196 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.794876 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-2vqzv" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.798817 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.804593 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-522n6\" (UniqueName: \"kubernetes.io/projected/bca228e2-5d0f-415b-943b-530f9291396a-kube-api-access-522n6\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4r7sw\" (UID: \"bca228e2-5d0f-415b-943b-530f9291396a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.845831 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.859121 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.868184 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg"] Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.905772 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.905839 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-522n6\" (UniqueName: \"kubernetes.io/projected/bca228e2-5d0f-415b-943b-530f9291396a-kube-api-access-522n6\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4r7sw\" (UID: \"bca228e2-5d0f-415b-943b-530f9291396a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.905866 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9rtj\" (UniqueName: \"kubernetes.io/projected/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-kube-api-access-c9rtj\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.906082 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndtsp\" (UniqueName: \"kubernetes.io/projected/e11d1820-45a9-4ecc-b400-7bbcb6f8b69e-kube-api-access-ndtsp\") pod \"watcher-operator-controller-manager-564965969-c2c8k\" (UID: \"e11d1820-45a9-4ecc-b400-7bbcb6f8b69e\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.906210 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.919831 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.965467 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-522n6\" (UniqueName: \"kubernetes.io/projected/bca228e2-5d0f-415b-943b-530f9291396a-kube-api-access-522n6\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4r7sw\" (UID: \"bca228e2-5d0f-415b-943b-530f9291396a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" Jan 30 21:43:15 crc kubenswrapper[4721]: I0130 21:43:15.972420 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.044695 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.045327 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.045413 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9rtj\" (UniqueName: \"kubernetes.io/projected/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-kube-api-access-c9rtj\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.045461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndtsp\" (UniqueName: \"kubernetes.io/projected/e11d1820-45a9-4ecc-b400-7bbcb6f8b69e-kube-api-access-ndtsp\") pod \"watcher-operator-controller-manager-564965969-c2c8k\" (UID: \"e11d1820-45a9-4ecc-b400-7bbcb6f8b69e\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.045950 4721 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.046000 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. 
No retries permitted until 2026-01-30 21:43:16.545979379 +0000 UTC m=+1585.337880625 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "metrics-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.046210 4721 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.046238 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:16.546226257 +0000 UTC m=+1585.338127503 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.052481 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.086350 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9rtj\" (UniqueName: \"kubernetes.io/projected/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-kube-api-access-c9rtj\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.090037 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndtsp\" (UniqueName: \"kubernetes.io/projected/e11d1820-45a9-4ecc-b400-7bbcb6f8b69e-kube-api-access-ndtsp\") pod \"watcher-operator-controller-manager-564965969-c2c8k\" (UID: \"e11d1820-45a9-4ecc-b400-7bbcb6f8b69e\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.093917 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.147431 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.147669 4721 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.147743 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert podName:b10d3fdb-7237-4461-ba03-ed926092791f nodeName:}" failed. 
No retries permitted until 2026-01-30 21:43:18.147724088 +0000 UTC m=+1586.939625334 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert") pod "infra-operator-controller-manager-79955696d6-r42gp" (UID: "b10d3fdb-7237-4461-ba03-ed926092791f") : secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.200449 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" event={"ID":"ce8df3e5-ac5d-4782-97fe-b49e9342768a","Type":"ContainerStarted","Data":"6067a517badcedc74c216376e2952ae1386e60e8f45d0635e6e0db48146bd165"} Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.210215 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" event={"ID":"6dfaa0a8-aa69-4d52-8740-b1098802644c","Type":"ContainerStarted","Data":"c56fd315e1c0de6bd24fb1493e8d9b3737032321079e284dec198c42cd77ce79"} Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.219624 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" event={"ID":"e5aed1e3-eebf-4e1b-ab1b-1b81b337374e","Type":"ContainerStarted","Data":"81d3e0ce131f09171429da32c90b2bddf60d78f2c91486cbb4107a62addfc5ee"} Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.228728 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" event={"ID":"b6d746e4-3768-42df-956a-c700072e4e4c","Type":"ContainerStarted","Data":"2c38976149f41d335eb3c137da03cf2eb4811f9644559db17fe6009025860830"} Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.292753 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.319573 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.556793 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.557468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.557667 4721 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.557748 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. 
No retries permitted until 2026-01-30 21:43:17.557723668 +0000 UTC m=+1586.349624914 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "metrics-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.558264 4721 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.558332 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:17.558319907 +0000 UTC m=+1586.350221163 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.765760 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.766172 4721 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: E0130 21:43:16.766250 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert podName:34543b19-ae6c-4a39-ad40-0dff196f0fd6 nodeName:}" failed. No retries permitted until 2026-01-30 21:43:18.766227172 +0000 UTC m=+1587.558128418 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" (UID: "34543b19-ae6c-4a39-ad40-0dff196f0fd6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.777291 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.817887 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.855972 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.868904 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.935184 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.944093 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.949975 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.958982 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv"] Jan 30 21:43:16 crc kubenswrapper[4721]: I0130 21:43:16.976670 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth"] Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.127479 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-c2c8k"] Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.136504 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd"] Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.150475 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs"] Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.187970 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m 
DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bncps,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-jktzs_openstack-operators(6e34b8b8-6fc5-4444-b957-b6325671ec2a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.188054 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ndtsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-c2c8k_openstack-operators(e11d1820-45a9-4ecc-b400-7bbcb6f8b69e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.189187 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" podUID="6e34b8b8-6fc5-4444-b957-b6325671ec2a" Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.189276 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" podUID="e11d1820-45a9-4ecc-b400-7bbcb6f8b69e" Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.223290 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj"] Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.223628 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw"] Jan 30 21:43:17 crc kubenswrapper[4721]: W0130 21:43:17.291478 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5be9ffe8_a1a4_4aa1_a704_5443e1ef640b.slice/crio-e565b1e3c4ae09dcb6fb90f58701dbe5a01164d67517a3dd343bb8de277da893 WatchSource:0}: Error finding container e565b1e3c4ae09dcb6fb90f58701dbe5a01164d67517a3dd343bb8de277da893: Status 404 returned error can't find the container with id e565b1e3c4ae09dcb6fb90f58701dbe5a01164d67517a3dd343bb8de277da893 Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.328822 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" event={"ID":"7d5fad49-066d-48d6-a9f0-0c3a105df525","Type":"ContainerStarted","Data":"e0c3aaaa723395c65ca8141a9d2426ac2f69ec29fc12da9748f78a9d66ba6797"} Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.340156 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bznsh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-788c46999f-x44jj_openstack-operators(5be9ffe8-a1a4-4aa1-a704-5443e1ef640b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.340263 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" event={"ID":"6e34b8b8-6fc5-4444-b957-b6325671ec2a","Type":"ContainerStarted","Data":"9c42e55b97b91cba6eae986d6ce90bddd9f74e412bd4e56252702b3b623a816d"} Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.341431 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" podUID="5be9ffe8-a1a4-4aa1-a704-5443e1ef640b" Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.358079 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" podUID="6e34b8b8-6fc5-4444-b957-b6325671ec2a" Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.412847 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" 
event={"ID":"d9650011-7842-4b52-bf3b-728e40294cb4","Type":"ContainerStarted","Data":"4d6e0e60b3eba68738fca448843a91fe6fe06e507507b9bed23346f42cb2514e"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.423707 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" event={"ID":"2dbdf4c9-4962-45ea-ac32-adbb848529d7","Type":"ContainerStarted","Data":"41b9cc8f5934928c441b6a015d8daeacb69eec3de5858d6bb7ff8aad15ed7876"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.436974 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" event={"ID":"e7dd1fd7-f720-45b2-86b4-bc056b1ef360","Type":"ContainerStarted","Data":"e0b139ddf8ebb383e59e5af41ac18f54218c0f6422e7fdde8b7f52d86a4dda9b"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.452396 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" event={"ID":"54f2f57d-0269-4ba8-94f5-04873f29e16c","Type":"ContainerStarted","Data":"7c1ca7c9dc0ad5c8cba9a9d82740ba8307fd213fc5d30cdfa9a02e1c54d90cbb"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.473668 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" event={"ID":"64d5e6e7-b654-4060-9ba5-82e52e172a3b","Type":"ContainerStarted","Data":"e7079faa616f253dbad4eec1e620d37b5010faaf55c3fbd9663bd4a48391a75a"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.475863 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" event={"ID":"319096ad-d67b-4344-8bb2-290aafd57bc0","Type":"ContainerStarted","Data":"a963bff6cfaba32da416095b3d3cfd73b926b9732d428d6756663e3603566261"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.490025 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" event={"ID":"e11d1820-45a9-4ecc-b400-7bbcb6f8b69e","Type":"ContainerStarted","Data":"1ba5590e9b8edc7a47b30ec25efb1dfde1c788560b3d69e945d1a03a2c474cbe"} Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.500017 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" podUID="e11d1820-45a9-4ecc-b400-7bbcb6f8b69e" Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.504583 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" event={"ID":"c53f8f24-7f92-4255-ad09-8a729b4159ab","Type":"ContainerStarted","Data":"72e89f9497a7f29b417c035797f402ac2f9844c3034d695860655ae437967a39"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.509900 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" event={"ID":"782f1962-bc39-4162-84ae-acad49911f45","Type":"ContainerStarted","Data":"a5824a8d5c42974c3903da768d518daaf1329349ed2734da21579ae1c20884f3"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.512766 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" event={"ID":"3d3b8ade-729b-4dfc-9ae8-ead1999f9657","Type":"ContainerStarted","Data":"c468421fc88f2b1fca9f2c4e64d6f96b9695cb050386b75039931734aff48929"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.515853 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" event={"ID":"6a148cdc-0a77-4f57-b5e6-1b2acf90a900","Type":"ContainerStarted","Data":"ae6fc5b98832e5da65ed7341415e28014065d4fe857a573a37c331bbb5b1e2b7"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.524799 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" event={"ID":"d4433a61-fd64-4240-8a12-8d86a8a52e77","Type":"ContainerStarted","Data":"a9c0390cb880871c51f059fb3a39bcb890e9cf5caf609f55c59314518c676e87"} Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.608929 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:17 crc kubenswrapper[4721]: I0130 21:43:17.609017 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.609173 4721 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.609241 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:19.609221494 +0000 UTC m=+1588.401122740 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "webhook-server-cert" not found Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.609422 4721 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 21:43:17 crc kubenswrapper[4721]: E0130 21:43:17.609455 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:19.609444451 +0000 UTC m=+1588.401345697 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "metrics-server-cert" not found Jan 30 21:43:18 crc kubenswrapper[4721]: I0130 21:43:18.222890 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.224141 4721 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.224198 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert podName:b10d3fdb-7237-4461-ba03-ed926092791f nodeName:}" failed. No retries permitted until 2026-01-30 21:43:22.224177807 +0000 UTC m=+1591.016079053 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert") pod "infra-operator-controller-manager-79955696d6-r42gp" (UID: "b10d3fdb-7237-4461-ba03-ed926092791f") : secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:18 crc kubenswrapper[4721]: I0130 21:43:18.554383 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" event={"ID":"5be9ffe8-a1a4-4aa1-a704-5443e1ef640b","Type":"ContainerStarted","Data":"e565b1e3c4ae09dcb6fb90f58701dbe5a01164d67517a3dd343bb8de277da893"} Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.558804 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" podUID="5be9ffe8-a1a4-4aa1-a704-5443e1ef640b" Jan 30 21:43:18 crc kubenswrapper[4721]: I0130 21:43:18.565565 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" event={"ID":"bca228e2-5d0f-415b-943b-530f9291396a","Type":"ContainerStarted","Data":"6b5179c6dda70a69ce6254326076f44acca2f33cf8254ddf3b6be968c081bde4"} Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.571956 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" podUID="e11d1820-45a9-4ecc-b400-7bbcb6f8b69e" Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.572566 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" podUID="6e34b8b8-6fc5-4444-b957-b6325671ec2a" Jan 30 21:43:18 crc kubenswrapper[4721]: I0130 21:43:18.859387 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.859679 4721 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:18 crc kubenswrapper[4721]: E0130 21:43:18.859756 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert podName:34543b19-ae6c-4a39-ad40-0dff196f0fd6 nodeName:}" failed. No retries permitted until 2026-01-30 21:43:22.859735389 +0000 UTC m=+1591.651636635 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" (UID: "34543b19-ae6c-4a39-ad40-0dff196f0fd6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:19 crc kubenswrapper[4721]: I0130 21:43:19.093049 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:43:19 crc kubenswrapper[4721]: E0130 21:43:19.093769 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:43:19 crc kubenswrapper[4721]: E0130 21:43:19.584646 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" podUID="5be9ffe8-a1a4-4aa1-a704-5443e1ef640b" Jan 30 21:43:19 crc kubenswrapper[4721]: E0130 21:43:19.675996 4721 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 21:43:19 crc kubenswrapper[4721]: E0130 21:43:19.676496 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:23.676458708 +0000 UTC m=+1592.468359954 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "webhook-server-cert" not found Jan 30 21:43:19 crc kubenswrapper[4721]: I0130 21:43:19.676430 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:19 crc kubenswrapper[4721]: I0130 21:43:19.676935 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:19 crc kubenswrapper[4721]: E0130 21:43:19.677209 4721 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 21:43:19 crc kubenswrapper[4721]: E0130 21:43:19.677261 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:23.677253322 +0000 UTC m=+1592.469154568 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "metrics-server-cert" not found Jan 30 21:43:22 crc kubenswrapper[4721]: I0130 21:43:22.224785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:22 crc kubenswrapper[4721]: E0130 21:43:22.225022 4721 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:22 crc kubenswrapper[4721]: E0130 21:43:22.232125 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert podName:b10d3fdb-7237-4461-ba03-ed926092791f nodeName:}" failed. No retries permitted until 2026-01-30 21:43:30.231960144 +0000 UTC m=+1599.023861410 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert") pod "infra-operator-controller-manager-79955696d6-r42gp" (UID: "b10d3fdb-7237-4461-ba03-ed926092791f") : secret "infra-operator-webhook-server-cert" not found Jan 30 21:43:22 crc kubenswrapper[4721]: I0130 21:43:22.944585 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:22 crc kubenswrapper[4721]: E0130 21:43:22.944817 4721 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:22 crc kubenswrapper[4721]: E0130 21:43:22.944927 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert podName:34543b19-ae6c-4a39-ad40-0dff196f0fd6 nodeName:}" failed. No retries permitted until 2026-01-30 21:43:30.944896891 +0000 UTC m=+1599.736798157 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" (UID: "34543b19-ae6c-4a39-ad40-0dff196f0fd6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 30 21:43:23 crc kubenswrapper[4721]: I0130 21:43:23.760607 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:23 crc kubenswrapper[4721]: I0130 21:43:23.760730 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:23 crc kubenswrapper[4721]: E0130 21:43:23.760972 4721 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 30 21:43:23 crc kubenswrapper[4721]: E0130 21:43:23.761092 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:31.761060902 +0000 UTC m=+1600.552962148 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "webhook-server-cert" not found Jan 30 21:43:23 crc kubenswrapper[4721]: E0130 21:43:23.761883 4721 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 30 21:43:23 crc kubenswrapper[4721]: E0130 21:43:23.762026 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs podName:1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c nodeName:}" failed. No retries permitted until 2026-01-30 21:43:31.761998642 +0000 UTC m=+1600.553899888 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs") pod "openstack-operator-controller-manager-57c48854c9-4r8wb" (UID: "1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c") : secret "metrics-server-cert" not found Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.135002 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4" Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.135903 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xrh99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-8886f4c47-snvcj_openstack-operators(2dbdf4c9-4962-45ea-ac32-adbb848529d7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.137338 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" podUID="2dbdf4c9-4962-45ea-ac32-adbb848529d7" Jan 30 21:43:30 crc kubenswrapper[4721]: I0130 21:43:30.318293 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:30 crc kubenswrapper[4721]: I0130 21:43:30.338671 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b10d3fdb-7237-4461-ba03-ed926092791f-cert\") pod \"infra-operator-controller-manager-79955696d6-r42gp\" (UID: \"b10d3fdb-7237-4461-ba03-ed926092791f\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:30 crc kubenswrapper[4721]: I0130 21:43:30.546789 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.691727 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4\\\"\"" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" podUID="2dbdf4c9-4962-45ea-ac32-adbb848529d7" Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.798384 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:27d83ada27cf70cda0c5738f97551d81f1ea4068e83a090f3312e22172d72e10" Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.799093 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:27d83ada27cf70cda0c5738f97551d81f1ea4068e83a090f3312e22172d72e10,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xbm94,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-69d6db494d-fvxk2_openstack-operators(6a148cdc-0a77-4f57-b5e6-1b2acf90a900): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:30 crc kubenswrapper[4721]: E0130 21:43:30.800557 4721 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" podUID="6a148cdc-0a77-4f57-b5e6-1b2acf90a900" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.029076 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.036511 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf\" (UID: \"34543b19-ae6c-4a39-ad40-0dff196f0fd6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.092259 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:43:31 crc kubenswrapper[4721]: E0130 21:43:31.092579 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.188272 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:31 crc kubenswrapper[4721]: E0130 21:43:31.699729 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:27d83ada27cf70cda0c5738f97551d81f1ea4068e83a090f3312e22172d72e10\\\"\"" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" podUID="6a148cdc-0a77-4f57-b5e6-1b2acf90a900" Jan 30 21:43:31 crc kubenswrapper[4721]: E0130 21:43:31.774805 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be" Jan 30 21:43:31 crc kubenswrapper[4721]: E0130 21:43:31.775114 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dfmnt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-2cmth_openstack-operators(c53f8f24-7f92-4255-ad09-8a729b4159ab): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:31 crc kubenswrapper[4721]: E0130 
21:43:31.776564 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" podUID="c53f8f24-7f92-4255-ad09-8a729b4159ab" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.844339 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.844446 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.856011 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-webhook-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:31 crc kubenswrapper[4721]: I0130 21:43:31.857340 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c-metrics-certs\") pod \"openstack-operator-controller-manager-57c48854c9-4r8wb\" (UID: \"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c\") " pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:32 crc kubenswrapper[4721]: I0130 21:43:32.072442 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:32 crc kubenswrapper[4721]: E0130 21:43:32.708031 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" podUID="c53f8f24-7f92-4255-ad09-8a729b4159ab" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 21:43:36.608982 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 21:43:36.610432 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8gkvv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-68fc8c869-6ptfc_openstack-operators(d9650011-7842-4b52-bf3b-728e40294cb4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 
21:43:36.612351 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" podUID="d9650011-7842-4b52-bf3b-728e40294cb4" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 21:43:36.733215 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 21:43:36.733286 4721 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 21:43:36.733493 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nvzzm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-fd77b8dd7-4t9rw_openstack-operators(54f2f57d-0269-4ba8-94f5-04873f29e16c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:36 crc kubenswrapper[4721]: 
E0130 21:43:36.734947 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" podUID="54f2f57d-0269-4ba8-94f5-04873f29e16c" Jan 30 21:43:36 crc kubenswrapper[4721]: E0130 21:43:36.746402 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" podUID="d9650011-7842-4b52-bf3b-728e40294cb4" Jan 30 21:43:37 crc kubenswrapper[4721]: E0130 21:43:37.465060 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17" Jan 30 21:43:37 crc kubenswrapper[4721]: E0130 21:43:37.465267 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-47947,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
keystone-operator-controller-manager-84f48565d4-jztwj_openstack-operators(7d5fad49-066d-48d6-a9f0-0c3a105df525): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:37 crc kubenswrapper[4721]: E0130 21:43:37.466628 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" podUID="7d5fad49-066d-48d6-a9f0-0c3a105df525" Jan 30 21:43:37 crc kubenswrapper[4721]: E0130 21:43:37.770935 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" podUID="54f2f57d-0269-4ba8-94f5-04873f29e16c" Jan 30 21:43:37 crc kubenswrapper[4721]: E0130 21:43:37.771599 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" podUID="7d5fad49-066d-48d6-a9f0-0c3a105df525" Jan 30 21:43:38 crc kubenswrapper[4721]: E0130 21:43:38.300036 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 30 21:43:38 crc kubenswrapper[4721]: E0130 21:43:38.301520 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-522n6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-4r7sw_openstack-operators(bca228e2-5d0f-415b-943b-530f9291396a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:38 crc kubenswrapper[4721]: E0130 21:43:38.302733 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" podUID="bca228e2-5d0f-415b-943b-530f9291396a" Jan 30 21:43:38 crc kubenswrapper[4721]: E0130 21:43:38.778370 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" podUID="bca228e2-5d0f-415b-943b-530f9291396a" Jan 30 21:43:39 crc kubenswrapper[4721]: E0130 21:43:39.077513 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e" Jan 30 21:43:39 crc kubenswrapper[4721]: E0130 21:43:39.077779 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k5d78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-55bff696bd-qhqfv_openstack-operators(3d3b8ade-729b-4dfc-9ae8-ead1999f9657): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:39 crc kubenswrapper[4721]: E0130 21:43:39.082565 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" podUID="3d3b8ade-729b-4dfc-9ae8-ead1999f9657" Jan 30 21:43:39 crc kubenswrapper[4721]: I0130 21:43:39.495206 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-r42gp"] Jan 30 21:43:39 crc kubenswrapper[4721]: E0130 21:43:39.786913 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" podUID="3d3b8ade-729b-4dfc-9ae8-ead1999f9657" Jan 30 21:43:42 crc kubenswrapper[4721]: I0130 21:43:42.097181 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:43:42 crc kubenswrapper[4721]: E0130 21:43:42.097756 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:43:48 crc kubenswrapper[4721]: I0130 21:43:48.888846 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" event={"ID":"b10d3fdb-7237-4461-ba03-ed926092791f","Type":"ContainerStarted","Data":"558c689ded795fd8671eb24807c2e128d951ec51efbf01a8fd9775ee18a456c4"} Jan 30 21:43:50 crc kubenswrapper[4721]: E0130 21:43:50.379650 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = 
copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b" Jan 30 21:43:50 crc kubenswrapper[4721]: E0130 21:43:50.380339 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ndtsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-c2c8k_openstack-operators(e11d1820-45a9-4ecc-b400-7bbcb6f8b69e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:43:50 crc kubenswrapper[4721]: E0130 21:43:50.381533 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" podUID="e11d1820-45a9-4ecc-b400-7bbcb6f8b69e" Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.835271 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb"] Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.880170 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf"] 
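The volume-mount failures earlier in this stretch show kubelet retrying the same secret volume with a doubling delay — durationBeforeRetry 4s, then 8s for the openstack-baremetal-operator cert and the openstack-operator webhook-certs/metrics-certs volumes — until the missing secrets appear and MountVolume.SetUp succeeds. A minimal Go sketch for pulling that pattern out of a log shaped like these entries; the program, its regexes, and the filename are illustrative assumptions, not part of kubelet, and it assumes kubelet's native one-entry-per-line layout (the hard-wrapped copy above would need its split entries re-joined first):

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"time"
)

// Scans a kubelet log on stdin and records, per volume, the sequence of
// durationBeforeRetry values reported by nestedpendingoperations.go, so the
// backoff doubling (4s, 8s, ...) is visible at a glance.
var (
	volRe   = regexp.MustCompile(`volumeName:(\S+) podName:`)
	delayRe = regexp.MustCompile(`\(durationBeforeRetry ([^)]+)\)`)
)

func main() {
	delays := map[string][]time.Duration{}
	sc := bufio.NewScanner(os.Stdin)
	// Entries with full container specs run to several KB per line,
	// so raise the scanner's buffer above its 64 KB default.
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		line := sc.Text()
		vol := volRe.FindStringSubmatch(line)
		delay := delayRe.FindStringSubmatch(line)
		if vol == nil || delay == nil {
			continue // not a mount-retry line
		}
		if d, err := time.ParseDuration(delay[1]); err == nil {
			delays[vol[1]] = append(delays[vol[1]], d)
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
		os.Exit(1)
	}
	for vol, ds := range delays {
		fmt.Printf("%s: %v\n", vol, ds)
	}
}

Run as "go run retryscan.go < kubelet.log" (hypothetical filename); on the entries above it should report [4s 8s] for kubernetes.io/secret/34543b19-ae6c-4a39-ad40-0dff196f0fd6-cert and for the 1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c webhook-certs and metrics-certs volumes, with only the 8s step visible here for the infra-operator cert.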
Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.903723 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" event={"ID":"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c","Type":"ContainerStarted","Data":"adcd1e1d4fe726bf0c5340b99ab645bfea97debdba5723f58a6a1eadf1babfce"} Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.909530 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" event={"ID":"e7dd1fd7-f720-45b2-86b4-bc056b1ef360","Type":"ContainerStarted","Data":"0f5370defae36f7c4ee3fae257d39159e7ccee5ef4b1cbfa34e05044da55b659"} Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.909825 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.916447 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" event={"ID":"64d5e6e7-b654-4060-9ba5-82e52e172a3b","Type":"ContainerStarted","Data":"c90cf40fc679ab163b6aacc1c169e6b3e56325c979c13d38078db35c190a2b27"} Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.917264 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.919835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" event={"ID":"782f1962-bc39-4162-84ae-acad49911f45","Type":"ContainerStarted","Data":"59ca03930c09eafd97f53438b3a251d55b290d57de94367753ee682cfd380690"} Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.920694 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.922626 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" event={"ID":"319096ad-d67b-4344-8bb2-290aafd57bc0","Type":"ContainerStarted","Data":"c5900340cfd6d7c40abaa73675531742f8f49830d097e364cb75f623a4c63e27"} Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.923109 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.939834 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" podStartSLOduration=16.448358155 podStartE2EDuration="36.939817897s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.951012921 +0000 UTC m=+1585.742914167" lastFinishedPulling="2026-01-30 21:43:37.442472663 +0000 UTC m=+1606.234373909" observedRunningTime="2026-01-30 21:43:50.935537975 +0000 UTC m=+1619.727439221" watchObservedRunningTime="2026-01-30 21:43:50.939817897 +0000 UTC m=+1619.731719143" Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.941216 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" 
event={"ID":"ce8df3e5-ac5d-4782-97fe-b49e9342768a","Type":"ContainerStarted","Data":"eefb78e4ced5472c46caa7e6f72ded7cb28bd25eaf427d639aee7d7001763d0a"} Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.942101 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:43:50 crc kubenswrapper[4721]: W0130 21:43:50.951133 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34543b19_ae6c_4a39_ad40_0dff196f0fd6.slice/crio-763b1e95697a9a3ae7f03dcd154f792e1c1f090538af86338e66ba5c6c14bdb8 WatchSource:0}: Error finding container 763b1e95697a9a3ae7f03dcd154f792e1c1f090538af86338e66ba5c6c14bdb8: Status 404 returned error can't find the container with id 763b1e95697a9a3ae7f03dcd154f792e1c1f090538af86338e66ba5c6c14bdb8 Jan 30 21:43:50 crc kubenswrapper[4721]: I0130 21:43:50.977578 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" podStartSLOduration=15.072609275 podStartE2EDuration="36.977557746s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.385743426 +0000 UTC m=+1585.177644672" lastFinishedPulling="2026-01-30 21:43:38.290691897 +0000 UTC m=+1607.082593143" observedRunningTime="2026-01-30 21:43:50.970821657 +0000 UTC m=+1619.762722903" watchObservedRunningTime="2026-01-30 21:43:50.977557746 +0000 UTC m=+1619.769458992" Jan 30 21:43:51 crc kubenswrapper[4721]: I0130 21:43:51.023789 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" podStartSLOduration=15.565068885 podStartE2EDuration="37.023766745s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.832035368 +0000 UTC m=+1585.623936614" lastFinishedPulling="2026-01-30 21:43:38.290733218 +0000 UTC m=+1607.082634474" observedRunningTime="2026-01-30 21:43:50.989082081 +0000 UTC m=+1619.780983327" watchObservedRunningTime="2026-01-30 21:43:51.023766745 +0000 UTC m=+1619.815667991" Jan 30 21:43:51 crc kubenswrapper[4721]: I0130 21:43:51.052274 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" podStartSLOduration=17.120412887 podStartE2EDuration="37.052252147s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.808119399 +0000 UTC m=+1585.600020645" lastFinishedPulling="2026-01-30 21:43:36.739958659 +0000 UTC m=+1605.531859905" observedRunningTime="2026-01-30 21:43:51.027961566 +0000 UTC m=+1619.819862812" watchObservedRunningTime="2026-01-30 21:43:51.052252147 +0000 UTC m=+1619.844153403" Jan 30 21:43:51 crc kubenswrapper[4721]: I0130 21:43:51.060267 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" podStartSLOduration=16.195977633 podStartE2EDuration="37.060243904s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:15.875532203 +0000 UTC m=+1584.667433449" lastFinishedPulling="2026-01-30 21:43:36.739798474 +0000 UTC m=+1605.531699720" observedRunningTime="2026-01-30 21:43:51.050094051 +0000 UTC m=+1619.841995307" watchObservedRunningTime="2026-01-30 21:43:51.060243904 +0000 UTC m=+1619.852145150" Jan 
30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.012143 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" event={"ID":"e5aed1e3-eebf-4e1b-ab1b-1b81b337374e","Type":"ContainerStarted","Data":"b6344c063c9ca1f96eb9929350e2b69a1ce971df6d45f1cf2331577fbdb0bc1d"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.013864 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.049945 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" podStartSLOduration=15.573153036 podStartE2EDuration="38.049925557s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:15.813747711 +0000 UTC m=+1584.605648947" lastFinishedPulling="2026-01-30 21:43:38.290520222 +0000 UTC m=+1607.082421468" observedRunningTime="2026-01-30 21:43:52.047177742 +0000 UTC m=+1620.839078988" watchObservedRunningTime="2026-01-30 21:43:52.049925557 +0000 UTC m=+1620.841826803" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.069565 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" event={"ID":"54f2f57d-0269-4ba8-94f5-04873f29e16c","Type":"ContainerStarted","Data":"69a8eef73560eedd9ccb1d55b5eb22090fbda4c86686d3f511631d1b09c0b999"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.070535 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.124339 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.124387 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" event={"ID":"d4433a61-fd64-4240-8a12-8d86a8a52e77","Type":"ContainerStarted","Data":"0ffdcf260b04ef0b394c8da25cc32f2707ba1cf888eac43f7a0378dcd3de509f"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.124777 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" event={"ID":"6e34b8b8-6fc5-4444-b957-b6325671ec2a","Type":"ContainerStarted","Data":"0f22383b42eef8f2b2d59698d9afd21819459b6f3fec163bcd46cd92d6e7f689"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.125407 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.125343 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" podStartSLOduration=4.523109191 podStartE2EDuration="38.125238338s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.986572332 +0000 UTC m=+1585.778473578" lastFinishedPulling="2026-01-30 21:43:50.588701479 +0000 UTC m=+1619.380602725" observedRunningTime="2026-01-30 21:43:52.099684837 +0000 UTC m=+1620.891586073" watchObservedRunningTime="2026-01-30 21:43:52.125238338 +0000 UTC 
m=+1620.917139584" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.158094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" event={"ID":"b6d746e4-3768-42df-956a-c700072e4e4c","Type":"ContainerStarted","Data":"cd8abb7d3d9fb42656530aa250df8f21d1b46936c5e2221d534562a7578d2d98"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.159277 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.195915 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" event={"ID":"5be9ffe8-a1a4-4aa1-a704-5443e1ef640b","Type":"ContainerStarted","Data":"915a335c7deb521230b1bb86e11ba2d256763dcf451d848f1b368375a2ea4ad3"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.197721 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.224079 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" event={"ID":"d9650011-7842-4b52-bf3b-728e40294cb4","Type":"ContainerStarted","Data":"e514c8e478324ec048ab48da35e49d9ca04869c12172fbad3399658700643108"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.224876 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.257098 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" podStartSLOduration=5.240230736 podStartE2EDuration="38.257080008s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:17.339882557 +0000 UTC m=+1586.131783803" lastFinishedPulling="2026-01-30 21:43:50.356731829 +0000 UTC m=+1619.148633075" observedRunningTime="2026-01-30 21:43:52.255788438 +0000 UTC m=+1621.047689684" watchObservedRunningTime="2026-01-30 21:43:52.257080008 +0000 UTC m=+1621.048981254" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.271684 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" event={"ID":"c53f8f24-7f92-4255-ad09-8a729b4159ab","Type":"ContainerStarted","Data":"1046eea6e4895a15acd4c304fedf7e836f6fa4f708fd83790da008e4c6d50986"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.272359 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.288833 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" podStartSLOduration=17.676727775 podStartE2EDuration="38.28881424s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.125341426 +0000 UTC m=+1584.917242672" lastFinishedPulling="2026-01-30 21:43:36.737427881 +0000 UTC m=+1605.529329137" observedRunningTime="2026-01-30 21:43:52.284136736 +0000 UTC m=+1621.076037982" watchObservedRunningTime="2026-01-30 21:43:52.28881424 +0000 UTC 
m=+1621.080715486" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.294563 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" event={"ID":"2dbdf4c9-4962-45ea-ac32-adbb848529d7","Type":"ContainerStarted","Data":"f789788880c942e2e64c908be4a9ded614f824249216567413d4957c8bb88c0e"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.295400 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.306964 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" event={"ID":"6dfaa0a8-aa69-4d52-8740-b1098802644c","Type":"ContainerStarted","Data":"bb985601e8530ac19a3eed5db097b7ae84d1eab95e8aa8a4dfcad7b655c41490"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.307433 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.308454 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" event={"ID":"34543b19-ae6c-4a39-ad40-0dff196f0fd6","Type":"ContainerStarted","Data":"763b1e95697a9a3ae7f03dcd154f792e1c1f090538af86338e66ba5c6c14bdb8"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.327835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" event={"ID":"6a148cdc-0a77-4f57-b5e6-1b2acf90a900","Type":"ContainerStarted","Data":"b9577a319dd071f78c46b4df23c02e7400af7859d540a9748f7201e16430705b"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.328284 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.330758 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" podStartSLOduration=4.973436797 podStartE2EDuration="38.330734127s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:17.187743278 +0000 UTC m=+1585.979644524" lastFinishedPulling="2026-01-30 21:43:50.545040608 +0000 UTC m=+1619.336941854" observedRunningTime="2026-01-30 21:43:52.313816004 +0000 UTC m=+1621.105717250" watchObservedRunningTime="2026-01-30 21:43:52.330734127 +0000 UTC m=+1621.122635373" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.347280 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" podStartSLOduration=16.485327118 podStartE2EDuration="38.347265549s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:17.184602931 +0000 UTC m=+1585.976504177" lastFinishedPulling="2026-01-30 21:43:39.046541362 +0000 UTC m=+1607.838442608" observedRunningTime="2026-01-30 21:43:52.345767923 +0000 UTC m=+1621.137669169" watchObservedRunningTime="2026-01-30 21:43:52.347265549 +0000 UTC m=+1621.139166795" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.355795 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" event={"ID":"1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c","Type":"ContainerStarted","Data":"8277ddfefc5f3a8d0d7bbdb2025a36e2bccfab3fc1549b0a4db741521ca8befb"} Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.355848 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.388816 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" podStartSLOduration=4.714071218 podStartE2EDuration="38.388793824s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.865475253 +0000 UTC m=+1585.657376499" lastFinishedPulling="2026-01-30 21:43:50.540197859 +0000 UTC m=+1619.332099105" observedRunningTime="2026-01-30 21:43:52.381766507 +0000 UTC m=+1621.173667753" watchObservedRunningTime="2026-01-30 21:43:52.388793824 +0000 UTC m=+1621.180695070" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.411541 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" podStartSLOduration=4.833532537 podStartE2EDuration="38.411522818s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.961290409 +0000 UTC m=+1585.753191655" lastFinishedPulling="2026-01-30 21:43:50.53928069 +0000 UTC m=+1619.331181936" observedRunningTime="2026-01-30 21:43:52.407927197 +0000 UTC m=+1621.199828443" watchObservedRunningTime="2026-01-30 21:43:52.411522818 +0000 UTC m=+1621.203424064" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.457604 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" podStartSLOduration=15.975479068 podStartE2EDuration="38.457584994s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:15.814145173 +0000 UTC m=+1584.606046419" lastFinishedPulling="2026-01-30 21:43:38.296251099 +0000 UTC m=+1607.088152345" observedRunningTime="2026-01-30 21:43:52.443599091 +0000 UTC m=+1621.235500337" watchObservedRunningTime="2026-01-30 21:43:52.457584994 +0000 UTC m=+1621.249486240" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.503442 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" podStartSLOduration=4.608357128 podStartE2EDuration="38.503423983s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.952355712 +0000 UTC m=+1585.744256958" lastFinishedPulling="2026-01-30 21:43:50.847422567 +0000 UTC m=+1619.639323813" observedRunningTime="2026-01-30 21:43:52.473102644 +0000 UTC m=+1621.265003890" watchObservedRunningTime="2026-01-30 21:43:52.503423983 +0000 UTC m=+1621.295325229" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.505094 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" podStartSLOduration=4.350441555 podStartE2EDuration="38.505089084s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.385278501 +0000 UTC m=+1585.177179737" lastFinishedPulling="2026-01-30 21:43:50.53992602 +0000 
UTC m=+1619.331827266" observedRunningTime="2026-01-30 21:43:52.500568815 +0000 UTC m=+1621.292470061" watchObservedRunningTime="2026-01-30 21:43:52.505089084 +0000 UTC m=+1621.296990330" Jan 30 21:43:52 crc kubenswrapper[4721]: I0130 21:43:52.556031 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" podStartSLOduration=38.556014851 podStartE2EDuration="38.556014851s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:43:52.552174221 +0000 UTC m=+1621.344075467" watchObservedRunningTime="2026-01-30 21:43:52.556014851 +0000 UTC m=+1621.347916097" Jan 30 21:43:53 crc kubenswrapper[4721]: I0130 21:43:53.372513 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" event={"ID":"7d5fad49-066d-48d6-a9f0-0c3a105df525","Type":"ContainerStarted","Data":"29c519be738099399c8b17c214ef15a3d20c19c3e1b3ad867770a28136d3605a"} Jan 30 21:43:53 crc kubenswrapper[4721]: I0130 21:43:53.402587 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" podStartSLOduration=4.587742679 podStartE2EDuration="39.402567052s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.814684211 +0000 UTC m=+1585.606585457" lastFinishedPulling="2026-01-30 21:43:51.629508584 +0000 UTC m=+1620.421409830" observedRunningTime="2026-01-30 21:43:53.389083935 +0000 UTC m=+1622.180985181" watchObservedRunningTime="2026-01-30 21:43:53.402567052 +0000 UTC m=+1622.194468298" Jan 30 21:43:54 crc kubenswrapper[4721]: I0130 21:43:54.910716 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.008960 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6qxsd" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.048184 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-scnz2" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.076653 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-j5m4f" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.382500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" event={"ID":"34543b19-ae6c-4a39-ad40-0dff196f0fd6","Type":"ContainerStarted","Data":"18ce0c57f985bca4f28148c15d5738da5251a10d9e081c7fb054a1a107ec333f"} Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.382596 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.384536 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" 
event={"ID":"bca228e2-5d0f-415b-943b-530f9291396a","Type":"ContainerStarted","Data":"1223ab21ac2396b31f47bfd8ab8d17e62f0a7b0c458006ef3098770f28de38dd"} Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.386680 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" event={"ID":"b10d3fdb-7237-4461-ba03-ed926092791f","Type":"ContainerStarted","Data":"ddeccefe64272b219db0268e5194dd70ccc80e271f5212c4d5fc0756b28b5e97"} Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.386809 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.410009 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" podStartSLOduration=37.542724467 podStartE2EDuration="41.409986045s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:50.958782364 +0000 UTC m=+1619.750683610" lastFinishedPulling="2026-01-30 21:43:54.826043942 +0000 UTC m=+1623.617945188" observedRunningTime="2026-01-30 21:43:55.406872449 +0000 UTC m=+1624.198773725" watchObservedRunningTime="2026-01-30 21:43:55.409986045 +0000 UTC m=+1624.201887301" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.430194 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4r7sw" podStartSLOduration=2.914613086 podStartE2EDuration="40.43017372s" podCreationTimestamp="2026-01-30 21:43:15 +0000 UTC" firstStartedPulling="2026-01-30 21:43:17.29150741 +0000 UTC m=+1586.083408646" lastFinishedPulling="2026-01-30 21:43:54.807068034 +0000 UTC m=+1623.598969280" observedRunningTime="2026-01-30 21:43:55.424112202 +0000 UTC m=+1624.216013458" watchObservedRunningTime="2026-01-30 21:43:55.43017372 +0000 UTC m=+1624.222074976" Jan 30 21:43:55 crc kubenswrapper[4721]: I0130 21:43:55.447333 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" podStartSLOduration=36.865800137 podStartE2EDuration="41.44728657s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:50.242989169 +0000 UTC m=+1619.034890425" lastFinishedPulling="2026-01-30 21:43:54.824475612 +0000 UTC m=+1623.616376858" observedRunningTime="2026-01-30 21:43:55.442626496 +0000 UTC m=+1624.234527752" watchObservedRunningTime="2026-01-30 21:43:55.44728657 +0000 UTC m=+1624.239187826" Jan 30 21:43:56 crc kubenswrapper[4721]: I0130 21:43:56.092562 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:43:56 crc kubenswrapper[4721]: E0130 21:43:56.093329 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:43:56 crc kubenswrapper[4721]: I0130 21:43:56.403048 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" event={"ID":"3d3b8ade-729b-4dfc-9ae8-ead1999f9657","Type":"ContainerStarted","Data":"257d437d45eefc67ba11eee5c418b7a99560d8c186d228865a2b2bd6d0b101a3"} Jan 30 21:43:56 crc kubenswrapper[4721]: I0130 21:43:56.445281 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" podStartSLOduration=3.560745345 podStartE2EDuration="42.445241709s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:16.987492841 +0000 UTC m=+1585.779394087" lastFinishedPulling="2026-01-30 21:43:55.871989175 +0000 UTC m=+1624.663890451" observedRunningTime="2026-01-30 21:43:56.439719618 +0000 UTC m=+1625.231620874" watchObservedRunningTime="2026-01-30 21:43:56.445241709 +0000 UTC m=+1625.237142995" Jan 30 21:44:00 crc kubenswrapper[4721]: I0130 21:44:00.557639 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-r42gp" Jan 30 21:44:01 crc kubenswrapper[4721]: I0130 21:44:01.199116 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf" Jan 30 21:44:02 crc kubenswrapper[4721]: I0130 21:44:02.082454 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-57c48854c9-4r8wb" Jan 30 21:44:04 crc kubenswrapper[4721]: E0130 21:44:04.096031 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" podUID="e11d1820-45a9-4ecc-b400-7bbcb6f8b69e" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.398596 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-qhpvg" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.407368 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-ddmz2" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.491959 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-77cms" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.546074 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-snvcj" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.605646 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-fvxk2" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.741627 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-m79fw" Jan 30 21:44:04 crc kubenswrapper[4721]: I0130 21:44:04.859728 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-mbz59" Jan 30 21:44:04 crc 
kubenswrapper[4721]: I0130 21:44:04.912132 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-jztwj" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.082278 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.085634 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qhqfv" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.434130 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-2cmth" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.496088 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kgpcd" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.564253 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-6ptfc" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.596880 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-fd77b8dd7-4t9rw" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.641798 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-x44jj" Jan 30 21:44:05 crc kubenswrapper[4721]: I0130 21:44:05.924668 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-jktzs" Jan 30 21:44:09 crc kubenswrapper[4721]: I0130 21:44:09.093481 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:44:09 crc kubenswrapper[4721]: E0130 21:44:09.094184 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:44:17 crc kubenswrapper[4721]: I0130 21:44:17.628870 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" event={"ID":"e11d1820-45a9-4ecc-b400-7bbcb6f8b69e","Type":"ContainerStarted","Data":"4e4381bccde39abae91032b9f9e07d1edf3f084c69ed1c9a9aa7aae7df3e7976"} Jan 30 21:44:17 crc kubenswrapper[4721]: I0130 21:44:17.629610 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:44:17 crc kubenswrapper[4721]: I0130 21:44:17.650433 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" podStartSLOduration=4.317329439 podStartE2EDuration="1m3.650413869s" podCreationTimestamp="2026-01-30 21:43:14 +0000 UTC" firstStartedPulling="2026-01-30 21:43:17.187877762 +0000 UTC m=+1585.979779008" 
lastFinishedPulling="2026-01-30 21:44:16.520962172 +0000 UTC m=+1645.312863438" observedRunningTime="2026-01-30 21:44:17.644184566 +0000 UTC m=+1646.436085822" watchObservedRunningTime="2026-01-30 21:44:17.650413869 +0000 UTC m=+1646.442315125" Jan 30 21:44:24 crc kubenswrapper[4721]: I0130 21:44:24.092963 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:44:24 crc kubenswrapper[4721]: E0130 21:44:24.094252 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:44:26 crc kubenswrapper[4721]: I0130 21:44:26.105000 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-c2c8k" Jan 30 21:44:37 crc kubenswrapper[4721]: I0130 21:44:37.092362 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:44:37 crc kubenswrapper[4721]: E0130 21:44:37.093587 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.427896 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5f296"] Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.431012 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.435770 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.435993 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.436081 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.436254 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-774rt" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.441867 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5f296"] Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.511383 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-l8cbk"] Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.512943 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.516381 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.519949 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-l8cbk"] Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.584176 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc00b8a9-5f34-4051-9c44-3e29e650027e-config\") pod \"dnsmasq-dns-675f4bcbfc-5f296\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.584257 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zxnh\" (UniqueName: \"kubernetes.io/projected/cc00b8a9-5f34-4051-9c44-3e29e650027e-kube-api-access-5zxnh\") pod \"dnsmasq-dns-675f4bcbfc-5f296\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.685620 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-config\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.685673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zxnh\" (UniqueName: \"kubernetes.io/projected/cc00b8a9-5f34-4051-9c44-3e29e650027e-kube-api-access-5zxnh\") pod \"dnsmasq-dns-675f4bcbfc-5f296\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.685692 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw2hr\" (UniqueName: \"kubernetes.io/projected/5a657244-7c25-4da8-9e58-484f521374f6-kube-api-access-jw2hr\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.685775 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc00b8a9-5f34-4051-9c44-3e29e650027e-config\") pod \"dnsmasq-dns-675f4bcbfc-5f296\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.685792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.687596 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc00b8a9-5f34-4051-9c44-3e29e650027e-config\") pod \"dnsmasq-dns-675f4bcbfc-5f296\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 
21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.709019 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zxnh\" (UniqueName: \"kubernetes.io/projected/cc00b8a9-5f34-4051-9c44-3e29e650027e-kube-api-access-5zxnh\") pod \"dnsmasq-dns-675f4bcbfc-5f296\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.765934 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.786699 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.786825 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-config\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.786856 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw2hr\" (UniqueName: \"kubernetes.io/projected/5a657244-7c25-4da8-9e58-484f521374f6-kube-api-access-jw2hr\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.787786 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.787869 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-config\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.810316 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw2hr\" (UniqueName: \"kubernetes.io/projected/5a657244-7c25-4da8-9e58-484f521374f6-kube-api-access-jw2hr\") pod \"dnsmasq-dns-78dd6ddcc-l8cbk\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:42 crc kubenswrapper[4721]: I0130 21:44:42.829204 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:44:43 crc kubenswrapper[4721]: I0130 21:44:43.262791 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5f296"] Jan 30 21:44:43 crc kubenswrapper[4721]: I0130 21:44:43.336998 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-l8cbk"] Jan 30 21:44:43 crc kubenswrapper[4721]: W0130 21:44:43.337474 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a657244_7c25_4da8_9e58_484f521374f6.slice/crio-1a3d9cc23f6c8aff181b56520fd668d3c3e1fb9dbd67b84e67ce386c47eaee6f WatchSource:0}: Error finding container 1a3d9cc23f6c8aff181b56520fd668d3c3e1fb9dbd67b84e67ce386c47eaee6f: Status 404 returned error can't find the container with id 1a3d9cc23f6c8aff181b56520fd668d3c3e1fb9dbd67b84e67ce386c47eaee6f Jan 30 21:44:43 crc kubenswrapper[4721]: I0130 21:44:43.898862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" event={"ID":"cc00b8a9-5f34-4051-9c44-3e29e650027e","Type":"ContainerStarted","Data":"c2199279d9d34c63fbb08b2964edd457cc61288bdaf152ec0f6d1ade01241fe5"} Jan 30 21:44:43 crc kubenswrapper[4721]: I0130 21:44:43.901609 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" event={"ID":"5a657244-7c25-4da8-9e58-484f521374f6","Type":"ContainerStarted","Data":"1a3d9cc23f6c8aff181b56520fd668d3c3e1fb9dbd67b84e67ce386c47eaee6f"} Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.146077 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5f296"] Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.164581 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pwbw8"] Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.165817 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.179933 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pwbw8"] Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.325430 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-config\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.325751 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-dns-svc\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.325805 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkw48\" (UniqueName: \"kubernetes.io/projected/2e533be2-6d15-4da5-aaba-332e873021a7-kube-api-access-hkw48\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.426024 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-l8cbk"] Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.426851 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-dns-svc\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.426943 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkw48\" (UniqueName: \"kubernetes.io/projected/2e533be2-6d15-4da5-aaba-332e873021a7-kube-api-access-hkw48\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.427023 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-config\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.428260 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-config\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.428464 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-dns-svc\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.456277 
4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkw48\" (UniqueName: \"kubernetes.io/projected/2e533be2-6d15-4da5-aaba-332e873021a7-kube-api-access-hkw48\") pod \"dnsmasq-dns-666b6646f7-pwbw8\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.462287 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wwk6b"] Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.466427 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.471270 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wwk6b"] Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.494501 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.631210 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.631290 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-config\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.631536 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw5kf\" (UniqueName: \"kubernetes.io/projected/a157c5e0-ff7c-4230-9762-55f2abe4df51-kube-api-access-xw5kf\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.732780 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-config\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.733107 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw5kf\" (UniqueName: \"kubernetes.io/projected/a157c5e0-ff7c-4230-9762-55f2abe4df51-kube-api-access-xw5kf\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.733171 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.733994 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.734496 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-config\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.755543 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw5kf\" (UniqueName: \"kubernetes.io/projected/a157c5e0-ff7c-4230-9762-55f2abe4df51-kube-api-access-xw5kf\") pod \"dnsmasq-dns-57d769cc4f-wwk6b\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:45 crc kubenswrapper[4721]: I0130 21:44:45.812714 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.066474 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pwbw8"] Jan 30 21:44:46 crc kubenswrapper[4721]: W0130 21:44:46.112810 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e533be2_6d15_4da5_aaba_332e873021a7.slice/crio-90d8bef3921161a3fe7a181c39bba6f693aef2b4e2f727717b73a9cca21a5084 WatchSource:0}: Error finding container 90d8bef3921161a3fe7a181c39bba6f693aef2b4e2f727717b73a9cca21a5084: Status 404 returned error can't find the container with id 90d8bef3921161a3fe7a181c39bba6f693aef2b4e2f727717b73a9cca21a5084 Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.318318 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.320484 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.323974 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.324177 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.324420 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.324586 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.325002 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.325144 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-bss2v" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.325231 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.326529 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.332286 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wwk6b"] Jan 30 21:44:46 crc kubenswrapper[4721]: W0130 21:44:46.372057 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda157c5e0_ff7c_4230_9762_55f2abe4df51.slice/crio-0084074702c3e4bec933c31d3fa0058d796f92881474cf33ba2aadc333df6ea3 WatchSource:0}: Error finding container 0084074702c3e4bec933c31d3fa0058d796f92881474cf33ba2aadc333df6ea3: Status 404 returned error can't find the container with id 0084074702c3e4bec933c31d3fa0058d796f92881474cf33ba2aadc333df6ea3 Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448194 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b667j\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-kube-api-access-b667j\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448236 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d483e4e0-6513-44ce-b601-359b9c2262ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448253 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448281 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448313 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448502 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-config-data\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448648 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d483e4e0-6513-44ce-b601-359b9c2262ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448712 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448788 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448807 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.448852 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555330 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b667j\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-kube-api-access-b667j\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555634 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d483e4e0-6513-44ce-b601-359b9c2262ca-erlang-cookie-secret\") pod 
\"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555705 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555725 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555792 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-config-data\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555859 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d483e4e0-6513-44ce-b601-359b9c2262ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555898 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555926 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555941 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.555975 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.556139 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.556943 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.557767 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.558349 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.558952 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-config-data\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.561221 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.561278 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d483e4e0-6513-44ce-b601-359b9c2262ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.562137 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d483e4e0-6513-44ce-b601-359b9c2262ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0" Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.565905 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.565957 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d2e49a257cea4677cd616c88fc5a81899852b75682faa476306530b19a572e1c/globalmount\"" pod="openstack/rabbitmq-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.570473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.574894 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b667j\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-kube-api-access-b667j\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.585212 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.587190 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.594708 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-5sdtt"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.594901 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.595059 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.595067 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.595197 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.595271 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.595289 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.604048 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.628379 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " pod="openstack/rabbitmq-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.641823 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.762590 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.762672 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.762822 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2dgp\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-kube-api-access-k2dgp\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.762895 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.762957 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.763071 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.763131 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.763183 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1f120802-4119-4ed8-bf74-62b1e4a534bc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.763257 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.764689 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.764772 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1f120802-4119-4ed8-bf74-62b1e4a534bc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868214 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1f120802-4119-4ed8-bf74-62b1e4a534bc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868236 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868315 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2dgp\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-kube-api-access-k2dgp\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868343 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868367 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868394 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868417 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868434 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1f120802-4119-4ed8-bf74-62b1e4a534bc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.868454 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.874227 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.874767 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.875452 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.875668 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1f120802-4119-4ed8-bf74-62b1e4a534bc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.878038 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.878081 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/825a5bf72240810fee23f32c55feb81894d03f5689f753e1fa13da9c43ef0714/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.879516 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.880567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1f120802-4119-4ed8-bf74-62b1e4a534bc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.880699 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.880813 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.882683 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.904859 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2dgp\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-kube-api-access-k2dgp\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.939330 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" event={"ID":"a157c5e0-ff7c-4230-9762-55f2abe4df51","Type":"ContainerStarted","Data":"0084074702c3e4bec933c31d3fa0058d796f92881474cf33ba2aadc333df6ea3"}
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.941944 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" event={"ID":"2e533be2-6d15-4da5-aaba-332e873021a7","Type":"ContainerStarted","Data":"90d8bef3921161a3fe7a181c39bba6f693aef2b4e2f727717b73a9cca21a5084"}
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.942880 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:46 crc kubenswrapper[4721]: I0130 21:44:46.951058 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.197486 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 30 21:44:47 crc kubenswrapper[4721]: W0130 21:44:47.208074 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd483e4e0_6513_44ce_b601_359b9c2262ca.slice/crio-7c35c52bf45495b6d32fad9ef0dd875bb2a6146d773de2577a158d4bdf5faab5 WatchSource:0}: Error finding container 7c35c52bf45495b6d32fad9ef0dd875bb2a6146d773de2577a158d4bdf5faab5: Status 404 returned error can't find the container with id 7c35c52bf45495b6d32fad9ef0dd875bb2a6146d773de2577a158d4bdf5faab5
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.460719 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.845103 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.846606 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.863118 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-tjmt5"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.865917 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.866155 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.867007 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.867470 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.868682 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.969733 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d483e4e0-6513-44ce-b601-359b9c2262ca","Type":"ContainerStarted","Data":"7c35c52bf45495b6d32fad9ef0dd875bb2a6146d773de2577a158d4bdf5faab5"}
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.978181 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1f120802-4119-4ed8-bf74-62b1e4a534bc","Type":"ContainerStarted","Data":"73145151fc6d97c9900c2ca67909d64e973403bcb60449a9ef8af3d302dbddce"}
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998328 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96303720-27c1-495f-8597-5891c08c5e06-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998382 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96303720-27c1-495f-8597-5891c08c5e06-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998405 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-operator-scripts\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998642 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998774 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-kolla-config\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998839 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-config-data-default\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998898 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96303720-27c1-495f-8597-5891c08c5e06-config-data-generated\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:47 crc kubenswrapper[4721]: I0130 21:44:47.998933 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwg6s\" (UniqueName: \"kubernetes.io/projected/96303720-27c1-495f-8597-5891c08c5e06-kube-api-access-jwg6s\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.107949 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-operator-scripts\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.108112 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.108194 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-kolla-config\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.108235 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-config-data-default\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.108271 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96303720-27c1-495f-8597-5891c08c5e06-config-data-generated\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.109665 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwg6s\" (UniqueName: \"kubernetes.io/projected/96303720-27c1-495f-8597-5891c08c5e06-kube-api-access-jwg6s\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.109872 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96303720-27c1-495f-8597-5891c08c5e06-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.109899 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96303720-27c1-495f-8597-5891c08c5e06-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.111536 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.111570 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1090a4d2e947d8da01273ae2b0b4c3c70ceb42d1531108d1f039e1cb5ba6b6ad/globalmount\"" pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.111995 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96303720-27c1-495f-8597-5891c08c5e06-config-data-generated\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.112399 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-operator-scripts\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.114853 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-config-data-default\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.117946 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96303720-27c1-495f-8597-5891c08c5e06-kolla-config\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.126717 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96303720-27c1-495f-8597-5891c08c5e06-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.150937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwg6s\" (UniqueName: \"kubernetes.io/projected/96303720-27c1-495f-8597-5891c08c5e06-kube-api-access-jwg6s\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.151376 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96303720-27c1-495f-8597-5891c08c5e06-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.176353 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0011319-fb88-466d-b43d-2e21f9a6e3b7\") pod \"openstack-galera-0\" (UID: \"96303720-27c1-495f-8597-5891c08c5e06\") " pod="openstack/openstack-galera-0"
Jan 30 21:44:48 crc kubenswrapper[4721]: I0130 21:44:48.193807 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.010330 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 30 21:44:49 crc kubenswrapper[4721]: W0130 21:44:49.065766 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96303720_27c1_495f_8597_5891c08c5e06.slice/crio-a58727f657a1e017e7f7cd98e26b8f0e162972e9d3719fb088795c58386e4095 WatchSource:0}: Error finding container a58727f657a1e017e7f7cd98e26b8f0e162972e9d3719fb088795c58386e4095: Status 404 returned error can't find the container with id a58727f657a1e017e7f7cd98e26b8f0e162972e9d3719fb088795c58386e4095
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.071842 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.073216 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.078176 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.078377 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.078506 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-lxm5j"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.078558 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.079054 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143115 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143159 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143187 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143218 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcgp6\" (UniqueName: \"kubernetes.io/projected/8bff96ff-2424-4622-8c4d-d866a4b28b21-kube-api-access-kcgp6\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143261 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bff96ff-2424-4622-8c4d-d866a4b28b21-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143283 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8bff96ff-2424-4622-8c4d-d866a4b28b21-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143319 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.143509 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bff96ff-2424-4622-8c4d-d866a4b28b21-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246181 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246605 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246638 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcgp6\" (UniqueName: \"kubernetes.io/projected/8bff96ff-2424-4622-8c4d-d866a4b28b21-kube-api-access-kcgp6\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246717 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bff96ff-2424-4622-8c4d-d866a4b28b21-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246743 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8bff96ff-2424-4622-8c4d-d866a4b28b21-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246771 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.246811 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bff96ff-2424-4622-8c4d-d866a4b28b21-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.247310 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.248019 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.248499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bff96ff-2424-4622-8c4d-d866a4b28b21-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.249616 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8bff96ff-2424-4622-8c4d-d866a4b28b21-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.253679 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bff96ff-2424-4622-8c4d-d866a4b28b21-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.261177 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.261212 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/dcd748f271bcb4b2a73e739c464be5ffa8ec6e5f83a8b85b273f6e836f9746fb/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.265484 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bff96ff-2424-4622-8c4d-d866a4b28b21-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.267578 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcgp6\" (UniqueName: \"kubernetes.io/projected/8bff96ff-2424-4622-8c4d-d866a4b28b21-kube-api-access-kcgp6\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.326697 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-88ced1c4-ec9f-4e25-b3d4-c93813931331\") pod \"openstack-cell1-galera-0\" (UID: \"8bff96ff-2424-4622-8c4d-d866a4b28b21\") " pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.400362 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.495105 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.497234 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.502168 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-52ngx"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.502395 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.502539 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.507213 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.569016 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llpfp\" (UniqueName: \"kubernetes.io/projected/7548d1d0-371f-4bf8-a557-a9734c49a52e-kube-api-access-llpfp\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.569059 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7548d1d0-371f-4bf8-a557-a9734c49a52e-kolla-config\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.569087 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7548d1d0-371f-4bf8-a557-a9734c49a52e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.569144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548d1d0-371f-4bf8-a557-a9734c49a52e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.569197 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7548d1d0-371f-4bf8-a557-a9734c49a52e-config-data\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.673370 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llpfp\" (UniqueName: \"kubernetes.io/projected/7548d1d0-371f-4bf8-a557-a9734c49a52e-kube-api-access-llpfp\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.673449 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7548d1d0-371f-4bf8-a557-a9734c49a52e-kolla-config\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.673491 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7548d1d0-371f-4bf8-a557-a9734c49a52e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.673558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548d1d0-371f-4bf8-a557-a9734c49a52e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.673618 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7548d1d0-371f-4bf8-a557-a9734c49a52e-config-data\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.674645 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7548d1d0-371f-4bf8-a557-a9734c49a52e-kolla-config\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.674713 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7548d1d0-371f-4bf8-a557-a9734c49a52e-config-data\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.678410 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7548d1d0-371f-4bf8-a557-a9734c49a52e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.681006 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548d1d0-371f-4bf8-a557-a9734c49a52e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.696275 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llpfp\" (UniqueName: \"kubernetes.io/projected/7548d1d0-371f-4bf8-a557-a9734c49a52e-kube-api-access-llpfp\") pod \"memcached-0\" (UID: \"7548d1d0-371f-4bf8-a557-a9734c49a52e\") " pod="openstack/memcached-0"
Jan 30 21:44:49 crc kubenswrapper[4721]: I0130 21:44:49.857402 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Jan 30 21:44:50 crc kubenswrapper[4721]: I0130 21:44:50.000681 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"96303720-27c1-495f-8597-5891c08c5e06","Type":"ContainerStarted","Data":"a58727f657a1e017e7f7cd98e26b8f0e162972e9d3719fb088795c58386e4095"}
Jan 30 21:44:50 crc kubenswrapper[4721]: I0130 21:44:50.314008 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 30 21:44:50 crc kubenswrapper[4721]: W0130 21:44:50.318390 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bff96ff_2424_4622_8c4d_d866a4b28b21.slice/crio-1bb6a5fd99801630700e5a2d1b8d29d63659fcbe6ffeb56fd35ece9dc208009b WatchSource:0}: Error finding container 1bb6a5fd99801630700e5a2d1b8d29d63659fcbe6ffeb56fd35ece9dc208009b: Status 404 returned error can't find the container with id 1bb6a5fd99801630700e5a2d1b8d29d63659fcbe6ffeb56fd35ece9dc208009b
Jan 30 21:44:50 crc kubenswrapper[4721]: I0130 21:44:50.388014 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Jan 30 21:44:50 crc kubenswrapper[4721]: W0130 21:44:50.420455 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7548d1d0_371f_4bf8_a557_a9734c49a52e.slice/crio-631744dee18b71e981e0f11e25b1d7b48f888933149e6958cce33b902d203741 WatchSource:0}: Error finding container 631744dee18b71e981e0f11e25b1d7b48f888933149e6958cce33b902d203741: Status 404 returned error can't find the container with id 631744dee18b71e981e0f11e25b1d7b48f888933149e6958cce33b902d203741
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.029136 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8bff96ff-2424-4622-8c4d-d866a4b28b21","Type":"ContainerStarted","Data":"1bb6a5fd99801630700e5a2d1b8d29d63659fcbe6ffeb56fd35ece9dc208009b"}
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.035200 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7548d1d0-371f-4bf8-a557-a9734c49a52e","Type":"ContainerStarted","Data":"631744dee18b71e981e0f11e25b1d7b48f888933149e6958cce33b902d203741"}
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.073374 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.074285 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.091308 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-r48r7"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.092916 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"
Jan 30 21:44:51 crc kubenswrapper[4721]: E0130 21:44:51.093192 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.100006 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.235883 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j4n8\" (UniqueName: \"kubernetes.io/projected/b357dd79-4a4f-48c7-ba6f-058ca30785f5-kube-api-access-2j4n8\") pod \"kube-state-metrics-0\" (UID: \"b357dd79-4a4f-48c7-ba6f-058ca30785f5\") " pod="openstack/kube-state-metrics-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.337490 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j4n8\" (UniqueName: \"kubernetes.io/projected/b357dd79-4a4f-48c7-ba6f-058ca30785f5-kube-api-access-2j4n8\") pod \"kube-state-metrics-0\" (UID: \"b357dd79-4a4f-48c7-ba6f-058ca30785f5\") " pod="openstack/kube-state-metrics-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.368508 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j4n8\" (UniqueName: \"kubernetes.io/projected/b357dd79-4a4f-48c7-ba6f-058ca30785f5-kube-api-access-2j4n8\") pod \"kube-state-metrics-0\" (UID: \"b357dd79-4a4f-48c7-ba6f-058ca30785f5\") " pod="openstack/kube-state-metrics-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.467081 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.888607 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.896038 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.901693 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.901093 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.901867 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-9v85j"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.901903 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.901939 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Jan 30 21:44:51 crc kubenswrapper[4721]: I0130 21:44:51.902129 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056583 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/87b1d1f9-cf16-401a-b55d-a6d2434e0284-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056665 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056692 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056724 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056753 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/87b1d1f9-cf16-401a-b55d-a6d2434e0284-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056788 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/87b1d1f9-cf16-401a-b55d-a6d2434e0284-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.056970 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb8gl\" (UniqueName: \"kubernetes.io/projected/87b1d1f9-cf16-401a-b55d-a6d2434e0284-kube-api-access-vb8gl\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.090345 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162426 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162477 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162513 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162544 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/87b1d1f9-cf16-401a-b55d-a6d2434e0284-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162567 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/87b1d1f9-cf16-401a-b55d-a6d2434e0284-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162620 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb8gl\" (UniqueName: \"kubernetes.io/projected/87b1d1f9-cf16-401a-b55d-a6d2434e0284-kube-api-access-vb8gl\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.162636 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/87b1d1f9-cf16-401a-b55d-a6d2434e0284-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.164825 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/87b1d1f9-cf16-401a-b55d-a6d2434e0284-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.172176 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.172390 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.172529 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.172664 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.182898 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.188322 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.188657 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/87b1d1f9-cf16-401a-b55d-a6d2434e0284-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.188817 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/87b1d1f9-cf16-401a-b55d-a6d2434e0284-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.200548 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/87b1d1f9-cf16-401a-b55d-a6d2434e0284-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.203171 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb8gl\" (UniqueName: \"kubernetes.io/projected/87b1d1f9-cf16-401a-b55d-a6d2434e0284-kube-api-access-vb8gl\") pod \"alertmanager-metric-storage-0\" (UID: \"87b1d1f9-cf16-401a-b55d-a6d2434e0284\") " pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.228427 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-9v85j"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.238650 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.403489 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.405728 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.411535 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.411720 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.411847 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.411949 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-w9nwj"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.412041 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.412160 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.412255 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.412387 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.425541 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579105 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579157 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579269 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0"
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579460 4721 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579628 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8306e740-fd1d-459e-a0db-fc01a639f991-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579744 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-config\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.579827 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s89cq\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-kube-api-access-s89cq\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.580029 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.580068 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682045 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682110 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: 
\"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682158 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682202 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682257 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8306e740-fd1d-459e-a0db-fc01a639f991-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682309 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682333 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-config\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682356 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s89cq\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-kube-api-access-s89cq\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682394 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.682421 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.683077 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.683083 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.684173 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.687037 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-config\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.690381 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.690703 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.691954 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8306e740-fd1d-459e-a0db-fc01a639f991-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.692499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.695869 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.695898 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f2942c48d709e84727a5b6f13c3b84cc2416f605a50cdd6b9533ad0654f018a6/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.703019 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s89cq\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-kube-api-access-s89cq\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.739201 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.760565 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 30 21:44:52 crc kubenswrapper[4721]: I0130 21:44:52.810408 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 30 21:44:53 crc kubenswrapper[4721]: I0130 21:44:53.066123 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b357dd79-4a4f-48c7-ba6f-058ca30785f5","Type":"ContainerStarted","Data":"e8cc390e27cea4f710306d3e27144383ed96863136a8ccc67eb46383356fb152"} Jan 30 21:44:53 crc kubenswrapper[4721]: I0130 21:44:53.067635 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87b1d1f9-cf16-401a-b55d-a6d2434e0284","Type":"ContainerStarted","Data":"a6904dccdbebbc16e2ee6f5281a0be5f43162dc86498784e9550a9e2ed7d7408"} Jan 30 21:44:53 crc kubenswrapper[4721]: I0130 21:44:53.293338 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.174218 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4k958"] Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.177699 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.183031 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.183410 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-6bvjt" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.183442 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.186680 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-8mqsj"] Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.188747 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.192884 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4k958"] Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.197662 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-8mqsj"] Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260160 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-run\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260232 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-lib\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260254 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-scripts\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260281 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-etc-ovs\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260313 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j8bc\" (UniqueName: \"kubernetes.io/projected/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-kube-api-access-8j8bc\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260346 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-run\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " 
pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260377 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-log\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.260392 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-run-ovn\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.361695 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/522b5333-a647-446e-a261-b1828a1d20a3-scripts\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.361768 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-run\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.361842 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b5333-a647-446e-a261-b1828a1d20a3-combined-ca-bundle\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.361957 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-log-ovn\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362439 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-lib\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362483 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-scripts\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362520 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btzcx\" (UniqueName: \"kubernetes.io/projected/522b5333-a647-446e-a261-b1828a1d20a3-kube-api-access-btzcx\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362545 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/522b5333-a647-446e-a261-b1828a1d20a3-ovn-controller-tls-certs\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-etc-ovs\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362594 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j8bc\" (UniqueName: \"kubernetes.io/projected/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-kube-api-access-8j8bc\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362636 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-run\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362676 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-log\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-run-ovn\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.362909 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-run\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.363033 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-run-ovn\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.363190 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-lib\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.363340 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-etc-ovs\") pod \"ovn-controller-ovs-8mqsj\" (UID: 
\"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.363564 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-run\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.363837 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-var-log\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.367061 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-scripts\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.386148 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j8bc\" (UniqueName: \"kubernetes.io/projected/b0f6d373-29bd-47a5-8cf5-3937fbc1498f-kube-api-access-8j8bc\") pod \"ovn-controller-ovs-8mqsj\" (UID: \"b0f6d373-29bd-47a5-8cf5-3937fbc1498f\") " pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.464129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/522b5333-a647-446e-a261-b1828a1d20a3-ovn-controller-tls-certs\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.464682 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/522b5333-a647-446e-a261-b1828a1d20a3-scripts\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.464785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b5333-a647-446e-a261-b1828a1d20a3-combined-ca-bundle\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.464810 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-log-ovn\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.464871 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btzcx\" (UniqueName: \"kubernetes.io/projected/522b5333-a647-446e-a261-b1828a1d20a3-kube-api-access-btzcx\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.465167 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/522b5333-a647-446e-a261-b1828a1d20a3-var-log-ovn\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.466967 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/522b5333-a647-446e-a261-b1828a1d20a3-scripts\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.469969 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/522b5333-a647-446e-a261-b1828a1d20a3-ovn-controller-tls-certs\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.478224 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b5333-a647-446e-a261-b1828a1d20a3-combined-ca-bundle\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.480366 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btzcx\" (UniqueName: \"kubernetes.io/projected/522b5333-a647-446e-a261-b1828a1d20a3-kube-api-access-btzcx\") pod \"ovn-controller-4k958\" (UID: \"522b5333-a647-446e-a261-b1828a1d20a3\") " pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.568248 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4k958" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.583991 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.955151 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.957014 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.963123 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.963181 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.963551 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.963735 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 30 21:44:56 crc kubenswrapper[4721]: I0130 21:44:56.974106 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:56.993657 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-rm24b" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.091823 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.091929 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.091983 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63a5898f-2b47-44bb-85a0-1700940899c1-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.092001 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzg59\" (UniqueName: \"kubernetes.io/projected/63a5898f-2b47-44bb-85a0-1700940899c1-kube-api-access-bzg59\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.092040 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a5898f-2b47-44bb-85a0-1700940899c1-config\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.092069 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63a5898f-2b47-44bb-85a0-1700940899c1-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.092092 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.092126 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.193352 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63a5898f-2b47-44bb-85a0-1700940899c1-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.194276 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzg59\" (UniqueName: \"kubernetes.io/projected/63a5898f-2b47-44bb-85a0-1700940899c1-kube-api-access-bzg59\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.194451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a5898f-2b47-44bb-85a0-1700940899c1-config\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.194568 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63a5898f-2b47-44bb-85a0-1700940899c1-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.193984 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63a5898f-2b47-44bb-85a0-1700940899c1-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.195422 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a5898f-2b47-44bb-85a0-1700940899c1-config\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.196040 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63a5898f-2b47-44bb-85a0-1700940899c1-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.196185 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 
21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.196710 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.196832 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.196983 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.200318 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.200948 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.213078 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63a5898f-2b47-44bb-85a0-1700940899c1-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.213392 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
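Each pod in this log produces the same three-step trace per volume: reconciler_common.go:245 ("VerifyControllerAttachedVolume started"), reconciler_common.go:218 ("MountVolume started"), then operation_generator.go:637 ("MountVolume.SetUp succeeded"). That is the kubelet volume manager reconciling its desired state of the world (the volumes scheduled pods require) against its actual state (the volumes already mounted). A simplified sketch of the pattern; the type and function names here are invented for illustration, not the kubelet's actual types:

    package main

    import "fmt"

    type volumeToMount struct {
        name   string // e.g. "combined-ca-bundle"
        plugin string // e.g. "kubernetes.io/secret"
        pod    string // e.g. "openstack/ovsdbserver-nb-0"
    }

    // reconcile mounts anything present in the desired state but missing
    // from the actual state, mirroring the VerifyControllerAttachedVolume
    // -> MountVolume -> MountVolume.SetUp progression seen above.
    func reconcile(desired []volumeToMount, actual map[string]bool) {
        for _, v := range desired {
            if actual[v.name] {
                continue // already mounted, nothing to do
            }
            fmt.Printf("VerifyControllerAttachedVolume started for %q (%s) pod %s\n", v.name, v.plugin, v.pod)
            fmt.Printf("MountVolume started for %q\n", v.name)
            // ...plugin-specific SetUp (secret, configmap, projected, CSI...) runs here...
            actual[v.name] = true
            fmt.Printf("MountVolume.SetUp succeeded for %q\n", v.name)
        }
    }

    func main() {
        desired := []volumeToMount{
            {"scripts", "kubernetes.io/configmap", "openstack/ovsdbserver-nb-0"},
            {"combined-ca-bundle", "kubernetes.io/secret", "openstack/ovsdbserver-nb-0"},
        }
        reconcile(desired, map[string]bool{})
    }

Because the real reconciler runs these operations concurrently, "SetUp succeeded" timestamps can land slightly out of order relative to the "started" entries, as several of the lines above show.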
Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.213472 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/72dd35305ab62fbab618bdd49f361e6f911601fec5a42dc9a5ea87518da356cc/globalmount\"" pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.223708 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzg59\" (UniqueName: \"kubernetes.io/projected/63a5898f-2b47-44bb-85a0-1700940899c1-kube-api-access-bzg59\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.243416 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2a82b2f3-90d7-44c6-af06-8b7677454ac3\") pod \"ovsdbserver-nb-0\" (UID: \"63a5898f-2b47-44bb-85a0-1700940899c1\") " pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:57 crc kubenswrapper[4721]: I0130 21:44:57.294970 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.310990 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.313333 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.321015 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-r9ctr" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.321345 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.321499 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.321624 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.322384 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.417707 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.417772 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.417907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bwf2\" (UniqueName: \"kubernetes.io/projected/796cb9cb-aad7-4645-89ae-ae8764bfbe17-kube-api-access-4bwf2\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.418047 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.418115 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/796cb9cb-aad7-4645-89ae-ae8764bfbe17-config\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.418150 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.418186 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796cb9cb-aad7-4645-89ae-ae8764bfbe17-scripts\") 
pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.418247 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/796cb9cb-aad7-4645-89ae-ae8764bfbe17-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.521780 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.521838 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.521883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bwf2\" (UniqueName: \"kubernetes.io/projected/796cb9cb-aad7-4645-89ae-ae8764bfbe17-kube-api-access-4bwf2\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.521948 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.522015 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/796cb9cb-aad7-4645-89ae-ae8764bfbe17-config\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.522056 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.522099 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796cb9cb-aad7-4645-89ae-ae8764bfbe17-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.522159 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/796cb9cb-aad7-4645-89ae-ae8764bfbe17-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.522776 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/796cb9cb-aad7-4645-89ae-ae8764bfbe17-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.523036 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/796cb9cb-aad7-4645-89ae-ae8764bfbe17-config\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.523403 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796cb9cb-aad7-4645-89ae-ae8764bfbe17-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.525618 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.528096 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.528537 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.528572 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0714fab928e8d407457f4b4e2f26c3e5e42da02db71c99fb14b2b7ab91f10701/globalmount\"" pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.530795 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796cb9cb-aad7-4645-89ae-ae8764bfbe17-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.539199 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bwf2\" (UniqueName: \"kubernetes.io/projected/796cb9cb-aad7-4645-89ae-ae8764bfbe17-kube-api-access-4bwf2\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.559034 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a3fac9f-f852-4a36-908e-337a49c5cab5\") pod \"ovsdbserver-sb-0\" (UID: \"796cb9cb-aad7-4645-89ae-ae8764bfbe17\") " pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: I0130 21:44:58.635393 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 30 21:44:58 crc kubenswrapper[4721]: W0130 21:44:58.703474 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8306e740_fd1d_459e_a0db_fc01a639f991.slice/crio-9e1243b2d40ae2912216adc9ab6f871783d1215c9f663a8684f07fe077844029 WatchSource:0}: Error finding container 9e1243b2d40ae2912216adc9ab6f871783d1215c9f663a8684f07fe077844029: Status 404 returned error can't find the container with id 9e1243b2d40ae2912216adc9ab6f871783d1215c9f663a8684f07fe077844029 Jan 30 21:44:59 crc kubenswrapper[4721]: I0130 21:44:59.145284 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerStarted","Data":"9e1243b2d40ae2912216adc9ab6f871783d1215c9f663a8684f07fe077844029"} Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.140128 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.142020 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.145619 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.145921 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.147784 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.275706 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d725937f-ee2d-431d-a0ed-94dd553cd014-secret-volume\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.275875 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d725937f-ee2d-431d-a0ed-94dd553cd014-config-volume\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.275930 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz92v\" (UniqueName: \"kubernetes.io/projected/d725937f-ee2d-431d-a0ed-94dd553cd014-kube-api-access-dz92v\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.377426 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d725937f-ee2d-431d-a0ed-94dd553cd014-secret-volume\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.378333 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d725937f-ee2d-431d-a0ed-94dd553cd014-config-volume\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.378403 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz92v\" (UniqueName: \"kubernetes.io/projected/d725937f-ee2d-431d-a0ed-94dd553cd014-kube-api-access-dz92v\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.379660 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d725937f-ee2d-431d-a0ed-94dd553cd014-config-volume\") pod 
\"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.383484 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d725937f-ee2d-431d-a0ed-94dd553cd014-secret-volume\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.394608 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz92v\" (UniqueName: \"kubernetes.io/projected/d725937f-ee2d-431d-a0ed-94dd553cd014-kube-api-access-dz92v\") pod \"collect-profiles-29496825-2hl42\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.466577 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.567067 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.568420 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.577366 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-6x9pm" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.578133 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.578432 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.578478 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.581112 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.594681 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.684508 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.684591 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqr5d\" (UniqueName: \"kubernetes.io/projected/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-kube-api-access-lqr5d\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: 
\"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.685089 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.685254 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-config\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.685361 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.760773 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.762422 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.768415 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.768802 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.768992 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.781609 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.788371 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.788436 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqr5d\" (UniqueName: \"kubernetes.io/projected/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-kube-api-access-lqr5d\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.788497 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.788535 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-config\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.788556 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.789280 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.792599 
4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-config\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.802016 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.805249 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.816922 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqr5d\" (UniqueName: \"kubernetes.io/projected/f7847558-f6b6-4f0a-8fd3-45e0fef7fce4-kube-api-access-lqr5d\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-s89nx\" (UID: \"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.859219 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.860563 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.868062 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.868284 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.894144 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.895986 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b976659-d481-4cd4-b1b1-72a7d465067d-config\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.896101 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.896165 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.896651 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb4p8\" (UniqueName: \"kubernetes.io/projected/3b976659-d481-4cd4-b1b1-72a7d465067d-kube-api-access-qb4p8\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.896698 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.896734 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.896880 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.970120 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.971228 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.974765 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.974794 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.974823 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.974931 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.974966 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.975067 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.983604 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.987685 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.989367 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-x7mhj" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.994910 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn"] Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998068 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998115 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998165 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998194 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-928fs\" (UniqueName: 
\"kubernetes.io/projected/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-kube-api-access-928fs\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb4p8\" (UniqueName: \"kubernetes.io/projected/3b976659-d481-4cd4-b1b1-72a7d465067d-kube-api-access-qb4p8\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998240 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998266 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998287 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-config\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998322 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998348 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.998397 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b976659-d481-4cd4-b1b1-72a7d465067d-config\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:00 crc kubenswrapper[4721]: I0130 21:45:00.999259 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b976659-d481-4cd4-b1b1-72a7d465067d-config\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.000651 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.005499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.010620 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz"] Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.024191 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.024875 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/3b976659-d481-4cd4-b1b1-72a7d465067d-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.027701 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb4p8\" (UniqueName: \"kubernetes.io/projected/3b976659-d481-4cd4-b1b1-72a7d465067d-kube-api-access-qb4p8\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-csn7z\" (UID: \"3b976659-d481-4cd4-b1b1-72a7d465067d\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.089280 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.101865 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gbwg\" (UniqueName: \"kubernetes.io/projected/a6518545-fdf0-4445-8e62-d7ca4816779d-kube-api-access-2gbwg\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.101932 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.101955 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.101976 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-config\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.101996 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102025 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102056 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102077 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-ca-bundle\") pod 
\"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102095 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102122 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102159 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-797vj\" (UniqueName: \"kubernetes.io/projected/9d09968c-71f8-4bad-855b-ebc5abb78989-kube-api-access-797vj\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102186 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102208 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102257 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102340 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102386 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102414 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102433 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102463 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102482 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102524 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102554 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-928fs\" (UniqueName: \"kubernetes.io/projected/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-kube-api-access-928fs\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.102585 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc 
kubenswrapper[4721]: I0130 21:45:01.103560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-config\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.104169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.106628 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.107186 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.132361 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-928fs\" (UniqueName: \"kubernetes.io/projected/6e77f4bd-bf5d-4043-ae9e-e938a4e99b69-kube-api-access-928fs\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r\" (UID: \"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.185522 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204608 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204650 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204678 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204700 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204758 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204796 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gbwg\" (UniqueName: \"kubernetes.io/projected/a6518545-fdf0-4445-8e62-d7ca4816779d-kube-api-access-2gbwg\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204822 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204847 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" 
Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204876 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204895 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204910 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204928 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204964 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-797vj\" (UniqueName: \"kubernetes.io/projected/9d09968c-71f8-4bad-855b-ebc5abb78989-kube-api-access-797vj\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.204993 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.205046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.205079 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.205134 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.205176 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.205743 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.206096 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: E0130 21:45:01.206178 4721 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Jan 30 21:45:01 crc kubenswrapper[4721]: E0130 21:45:01.206222 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tls-secret podName:a6518545-fdf0-4445-8e62-d7ca4816779d nodeName:}" failed. No retries permitted until 2026-01-30 21:45:01.706208365 +0000 UTC m=+1690.498109611 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tls-secret") pod "cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" (UID: "a6518545-fdf0-4445-8e62-d7ca4816779d") : secret "cloudkitty-lokistack-gateway-http" not found Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.206889 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.206974 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: E0130 21:45:01.207580 4721 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Jan 30 21:45:01 crc kubenswrapper[4721]: E0130 21:45:01.207624 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tls-secret podName:9d09968c-71f8-4bad-855b-ebc5abb78989 nodeName:}" failed. No retries permitted until 2026-01-30 21:45:01.707611009 +0000 UTC m=+1690.499512325 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tls-secret") pod "cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" (UID: "9d09968c-71f8-4bad-855b-ebc5abb78989") : secret "cloudkitty-lokistack-gateway-http" not found Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.208353 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.209112 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.209895 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/9d09968c-71f8-4bad-855b-ebc5abb78989-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.210473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.210735 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.212245 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.214077 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.217730 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.218547 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.218642 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.222343 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gbwg\" (UniqueName: \"kubernetes.io/projected/a6518545-fdf0-4445-8e62-d7ca4816779d-kube-api-access-2gbwg\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.225320 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-797vj\" (UniqueName: \"kubernetes.io/projected/9d09968c-71f8-4bad-855b-ebc5abb78989-kube-api-access-797vj\") 
pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.713386 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.713506 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.720361 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/9d09968c-71f8-4bad-855b-ebc5abb78989-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-2plvn\" (UID: \"9d09968c-71f8-4bad-855b-ebc5abb78989\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.721644 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a6518545-fdf0-4445-8e62-d7ca4816779d-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz\" (UID: \"a6518545-fdf0-4445-8e62-d7ca4816779d\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.740772 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.743197 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.751043 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.751176 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.753732 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.840026 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.841387 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.843746 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.843836 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.855959 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.886134 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.916668 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.916758 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.916792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.916855 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.917030 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.917107 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.917186 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: 
\"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.917232 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drkzm\" (UniqueName: \"kubernetes.io/projected/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-kube-api-access-drkzm\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.923360 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.924500 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.925036 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.925905 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.926092 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Jan 30 21:45:01 crc kubenswrapper[4721]: I0130 21:45:01.933144 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018541 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018598 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018626 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018661 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drkzm\" (UniqueName: \"kubernetes.io/projected/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-kube-api-access-drkzm\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018698 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018722 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018739 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3aadfd-1e26-407f-98a8-c3f5681c2126-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018760 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018815 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018838 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018854 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018872 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018917 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " 
pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018942 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.018976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx9sr\" (UniqueName: \"kubernetes.io/projected/1a3aadfd-1e26-407f-98a8-c3f5681c2126-kube-api-access-kx9sr\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.019409 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.019980 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.020789 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.021690 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.027612 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.027805 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.037649 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: 
\"kubernetes.io/secret/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.040252 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.041259 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drkzm\" (UniqueName: \"kubernetes.io/projected/3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3-kube-api-access-drkzm\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.048177 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3\") " pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.114322 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120281 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rhtx\" (UniqueName: \"kubernetes.io/projected/ae53bce6-479b-4d55-9fb5-2441850bec4a-kube-api-access-8rhtx\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120330 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120447 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120589 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120638 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: 
\"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120760 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120823 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120886 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.120994 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx9sr\" (UniqueName: \"kubernetes.io/projected/1a3aadfd-1e26-407f-98a8-c3f5681c2126-kube-api-access-kx9sr\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.121063 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.121161 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae53bce6-479b-4d55-9fb5-2441850bec4a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.121215 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.121258 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " 
pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.121311 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3aadfd-1e26-407f-98a8-c3f5681c2126-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.122441 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.122620 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.122687 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3aadfd-1e26-407f-98a8-c3f5681c2126-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.125564 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.126170 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.130226 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/1a3aadfd-1e26-407f-98a8-c3f5681c2126-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.140975 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx9sr\" (UniqueName: \"kubernetes.io/projected/1a3aadfd-1e26-407f-98a8-c3f5681c2126-kube-api-access-kx9sr\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.143637 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-compactor-0\" 
(UID: \"1a3aadfd-1e26-407f-98a8-c3f5681c2126\") " pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.166406 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223042 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223130 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae53bce6-479b-4d55-9fb5-2441850bec4a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223199 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rhtx\" (UniqueName: \"kubernetes.io/projected/ae53bce6-479b-4d55-9fb5-2441850bec4a-kube-api-access-8rhtx\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223226 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223254 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223368 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.223407 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.224654 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") 
device mount path \"/mnt/openstack/pv05\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.225987 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae53bce6-479b-4d55-9fb5-2441850bec4a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.226058 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.228176 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.228803 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.230599 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/ae53bce6-479b-4d55-9fb5-2441850bec4a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.240563 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rhtx\" (UniqueName: \"kubernetes.io/projected/ae53bce6-479b-4d55-9fb5-2441850bec4a-kube-api-access-8rhtx\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.247272 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"ae53bce6-479b-4d55-9fb5-2441850bec4a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:02 crc kubenswrapper[4721]: I0130 21:45:02.283859 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:04 crc kubenswrapper[4721]: I0130 21:45:04.092974 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:45:04 crc kubenswrapper[4721]: E0130 21:45:04.094319 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:45:13 crc kubenswrapper[4721]: E0130 21:45:13.851818 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Jan 30 21:45:13 crc kubenswrapper[4721]: E0130 21:45:13.852660 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2dgp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(1f120802-4119-4ed8-bf74-62b1e4a534bc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:13 crc kubenswrapper[4721]: E0130 21:45:13.854467 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" Jan 30 21:45:13 crc kubenswrapper[4721]: E0130 21:45:13.857736 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Jan 30 21:45:13 crc kubenswrapper[4721]: E0130 21:45:13.858186 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b667j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(d483e4e0-6513-44ce-b601-359b9c2262ca): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:13 crc kubenswrapper[4721]: E0130 21:45:13.859448 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" Jan 30 21:45:14 crc kubenswrapper[4721]: E0130 21:45:14.303676 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" Jan 30 21:45:14 crc kubenswrapper[4721]: E0130 21:45:14.303695 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" Jan 30 21:45:16 crc kubenswrapper[4721]: E0130 21:45:16.378849 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Jan 30 21:45:16 crc kubenswrapper[4721]: E0130 21:45:16.380425 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jwg6s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(96303720-27c1-495f-8597-5891c08c5e06): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:16 crc kubenswrapper[4721]: E0130 21:45:16.381766 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="96303720-27c1-495f-8597-5891c08c5e06" Jan 30 21:45:17 crc kubenswrapper[4721]: I0130 21:45:17.092647 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:45:17 crc kubenswrapper[4721]: E0130 21:45:17.093268 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:45:17 crc kubenswrapper[4721]: E0130 21:45:17.327095 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" 
pod="openstack/openstack-galera-0" podUID="96303720-27c1-495f-8597-5891c08c5e06" Jan 30 21:45:18 crc kubenswrapper[4721]: E0130 21:45:18.414730 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Jan 30 21:45:18 crc kubenswrapper[4721]: E0130 21:45:18.415072 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n55h696h575h694h64fhfch659hcfh589hd8h5bdh587h5d8h5bch58dh65ch5b6hc7h664hf9h54h75hf8hdh598h5f6h645h65hcchfch559h5dfq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-llpfp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(7548d1d0-371f-4bf8-a557-a9734c49a52e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:18 crc kubenswrapper[4721]: E0130 21:45:18.416526 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="7548d1d0-371f-4bf8-a557-a9734c49a52e" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.079197 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:9a2097bc5b2e02bc1703f64c452ce8fe4bc6775b732db930ff4770b76ae4653a" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.079866 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init-config-reloader,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:9a2097bc5b2e02bc1703f64c452ce8fe4bc6775b732db930ff4770b76ae4653a,Command:[/bin/prometheus-config-reloader],Args:[--watch-interval=0 --listen-address=:8081 --config-file=/etc/prometheus/config/prometheus.yaml.gz --config-envsubst-file=/etc/prometheus/config_out/prometheus.env.yaml --watched-dir=/etc/prometheus/rules/prometheus-metric-storage-rulefiles-0 --watched-dir=/etc/prometheus/rules/prometheus-metric-storage-rulefiles-1 --watched-dir=/etc/prometheus/rules/prometheus-metric-storage-rulefiles-2],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:reloader-init,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:SHARD,Value:0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/prometheus/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-out,ReadOnly:false,MountPath:/etc/prometheus/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-0,ReadOnly:false,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-0,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-1,ReadOnly:false,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-1,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-2,ReadOnly:false,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-2,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s89cq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod prometheus-metric-storage-0_openstack(8306e740-fd1d-459e-a0db-fc01a639f991): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.081077 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/prometheus-metric-storage-0" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.344900 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="7548d1d0-371f-4bf8-a557-a9734c49a52e" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.345282 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:9a2097bc5b2e02bc1703f64c452ce8fe4bc6775b732db930ff4770b76ae4653a\\\"\"" pod="openstack/prometheus-metric-storage-0" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.964839 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.964988 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kcgp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(8bff96ff-2424-4622-8c4d-d866a4b28b21): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:19 crc kubenswrapper[4721]: E0130 21:45:19.966138 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="8bff96ff-2424-4622-8c4d-d866a4b28b21" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.017860 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.018099 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed 
--no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hkw48,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-pwbw8_openstack(2e533be2-6d15-4da5-aaba-332e873021a7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.019378 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.042019 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.042515 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jw2hr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-l8cbk_openstack(5a657244-7c25-4da8-9e58-484f521374f6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.043749 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" podUID="5a657244-7c25-4da8-9e58-484f521374f6" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.069285 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.069461 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5zxnh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-5f296_openstack(cc00b8a9-5f34-4051-9c44-3e29e650027e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.070692 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" podUID="cc00b8a9-5f34-4051-9c44-3e29e650027e" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.118698 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.118870 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xw5kf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-wwk6b_openstack(a157c5e0-ff7c-4230-9762-55f2abe4df51): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.120067 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.375417 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.375676 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" Jan 30 21:45:20 crc kubenswrapper[4721]: E0130 21:45:20.375727 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="8bff96ff-2424-4622-8c4d-d866a4b28b21" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.526380 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.539374 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.552438 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.561910 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.574007 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.589943 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.601101 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4k958"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.660123 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 21:45:21 crc kubenswrapper[4721]: W0130 21:45:21.661631 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e77f4bd_bf5d_4043_ae9e_e938a4e99b69.slice/crio-c18abe2b7adbec3562252d18c9bf038ef0608276c8237c4fb1131df24fb9ed61 WatchSource:0}: Error finding container c18abe2b7adbec3562252d18c9bf038ef0608276c8237c4fb1131df24fb9ed61: Status 404 returned error can't find the container with id c18abe2b7adbec3562252d18c9bf038ef0608276c8237c4fb1131df24fb9ed61 Jan 30 21:45:21 crc kubenswrapper[4721]: W0130 21:45:21.661895 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6518545_fdf0_4445_8e62_d7ca4816779d.slice/crio-374f493309e30b627d20ab156efc4e0e0770eb856a3aaf93bb3029647b2499ef WatchSource:0}: Error finding container 374f493309e30b627d20ab156efc4e0e0770eb856a3aaf93bb3029647b2499ef: Status 404 returned error can't find the container with id 374f493309e30b627d20ab156efc4e0e0770eb856a3aaf93bb3029647b2499ef Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.741117 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.742082 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.782741 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.794191 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.808635 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.811733 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zxnh\" (UniqueName: \"kubernetes.io/projected/cc00b8a9-5f34-4051-9c44-3e29e650027e-kube-api-access-5zxnh\") pod \"cc00b8a9-5f34-4051-9c44-3e29e650027e\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.811819 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc00b8a9-5f34-4051-9c44-3e29e650027e-config\") pod \"cc00b8a9-5f34-4051-9c44-3e29e650027e\" (UID: \"cc00b8a9-5f34-4051-9c44-3e29e650027e\") " Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.811874 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jw2hr\" (UniqueName: \"kubernetes.io/projected/5a657244-7c25-4da8-9e58-484f521374f6-kube-api-access-jw2hr\") pod \"5a657244-7c25-4da8-9e58-484f521374f6\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.811942 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-dns-svc\") pod \"5a657244-7c25-4da8-9e58-484f521374f6\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.812000 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-config\") pod \"5a657244-7c25-4da8-9e58-484f521374f6\" (UID: \"5a657244-7c25-4da8-9e58-484f521374f6\") " Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.812778 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-config" (OuterVolumeSpecName: "config") pod "5a657244-7c25-4da8-9e58-484f521374f6" (UID: "5a657244-7c25-4da8-9e58-484f521374f6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.812897 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5a657244-7c25-4da8-9e58-484f521374f6" (UID: "5a657244-7c25-4da8-9e58-484f521374f6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.813668 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc00b8a9-5f34-4051-9c44-3e29e650027e-config" (OuterVolumeSpecName: "config") pod "cc00b8a9-5f34-4051-9c44-3e29e650027e" (UID: "cc00b8a9-5f34-4051-9c44-3e29e650027e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.828893 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a657244-7c25-4da8-9e58-484f521374f6-kube-api-access-jw2hr" (OuterVolumeSpecName: "kube-api-access-jw2hr") pod "5a657244-7c25-4da8-9e58-484f521374f6" (UID: "5a657244-7c25-4da8-9e58-484f521374f6"). InnerVolumeSpecName "kube-api-access-jw2hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.829580 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc00b8a9-5f34-4051-9c44-3e29e650027e-kube-api-access-5zxnh" (OuterVolumeSpecName: "kube-api-access-5zxnh") pod "cc00b8a9-5f34-4051-9c44-3e29e650027e" (UID: "cc00b8a9-5f34-4051-9c44-3e29e650027e"). InnerVolumeSpecName "kube-api-access-5zxnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.914217 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zxnh\" (UniqueName: \"kubernetes.io/projected/cc00b8a9-5f34-4051-9c44-3e29e650027e-kube-api-access-5zxnh\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.914257 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc00b8a9-5f34-4051-9c44-3e29e650027e-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.914267 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jw2hr\" (UniqueName: \"kubernetes.io/projected/5a657244-7c25-4da8-9e58-484f521374f6-kube-api-access-jw2hr\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.914278 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:21 crc kubenswrapper[4721]: I0130 21:45:21.914318 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a657244-7c25-4da8-9e58-484f521374f6-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.382627 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" event={"ID":"5a657244-7c25-4da8-9e58-484f521374f6","Type":"ContainerDied","Data":"1a3d9cc23f6c8aff181b56520fd668d3c3e1fb9dbd67b84e67ce386c47eaee6f"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.382723 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-l8cbk" Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.384741 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" event={"ID":"a6518545-fdf0-4445-8e62-d7ca4816779d","Type":"ContainerStarted","Data":"374f493309e30b627d20ab156efc4e0e0770eb856a3aaf93bb3029647b2499ef"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.388245 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" event={"ID":"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69","Type":"ContainerStarted","Data":"c18abe2b7adbec3562252d18c9bf038ef0608276c8237c4fb1131df24fb9ed61"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.389755 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.389767 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-5f296" event={"ID":"cc00b8a9-5f34-4051-9c44-3e29e650027e","Type":"ContainerDied","Data":"c2199279d9d34c63fbb08b2964edd457cc61288bdaf152ec0f6d1ade01241fe5"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.393065 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" event={"ID":"d725937f-ee2d-431d-a0ed-94dd553cd014","Type":"ContainerStarted","Data":"1aff202bae43238d52c979e758850674037f593ddc57d3e4296f5e362e4feeba"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.394652 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" event={"ID":"3b976659-d481-4cd4-b1b1-72a7d465067d","Type":"ContainerStarted","Data":"a77a86e224e8bfc55751c6382586a67407ae80d1c1f9ce14da62a06d055025a0"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.396477 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" event={"ID":"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4","Type":"ContainerStarted","Data":"4bea1fd5af0d9b4add1c3cc182347f042b1b92cef46c9b1f62163dec1125c805"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.398079 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4k958" event={"ID":"522b5333-a647-446e-a261-b1828a1d20a3","Type":"ContainerStarted","Data":"2d10150e02718b9ba84073ec66424b02f882478fe98a6ce84abbcd529f154c1f"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.400201 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" event={"ID":"9d09968c-71f8-4bad-855b-ebc5abb78989","Type":"ContainerStarted","Data":"df8283aa4c1f6ff6b1a9974fc1f8d04752b1fe7860c0986b90ca68f3ef472354"} Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.427227 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-l8cbk"] Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.439529 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-l8cbk"] Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.457499 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5f296"] Jan 30 21:45:22 crc kubenswrapper[4721]: I0130 21:45:22.466171 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5f296"] Jan 30 
21:45:22 crc kubenswrapper[4721]: W0130 21:45:22.733642 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3aba9e5f_541b_42d2_9cd4_6a6ad04bcbc3.slice/crio-8e1d4d5f15a18a05fd394040c06fdb376659470eacfa4202ebfc5917ace1e442 WatchSource:0}: Error finding container 8e1d4d5f15a18a05fd394040c06fdb376659470eacfa4202ebfc5917ace1e442: Status 404 returned error can't find the container with id 8e1d4d5f15a18a05fd394040c06fdb376659470eacfa4202ebfc5917ace1e442 Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.017139 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 30 21:45:23 crc kubenswrapper[4721]: W0130 21:45:23.260151 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63a5898f_2b47_44bb_85a0_1700940899c1.slice/crio-6bdd4e632fb5141ec918d307cd88cf633a4233a7048661e1d829f6e2baa8d6ce WatchSource:0}: Error finding container 6bdd4e632fb5141ec918d307cd88cf633a4233a7048661e1d829f6e2baa8d6ce: Status 404 returned error can't find the container with id 6bdd4e632fb5141ec918d307cd88cf633a4233a7048661e1d829f6e2baa8d6ce Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.411945 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3","Type":"ContainerStarted","Data":"8e1d4d5f15a18a05fd394040c06fdb376659470eacfa4202ebfc5917ace1e442"} Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.413391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"63a5898f-2b47-44bb-85a0-1700940899c1","Type":"ContainerStarted","Data":"6bdd4e632fb5141ec918d307cd88cf633a4233a7048661e1d829f6e2baa8d6ce"} Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.415540 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"ae53bce6-479b-4d55-9fb5-2441850bec4a","Type":"ContainerStarted","Data":"299074e4b1b3f563602f7d8849110e6f7c109b9f03863ea3387c93bfd9f7229a"} Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.417227 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"1a3aadfd-1e26-407f-98a8-c3f5681c2126","Type":"ContainerStarted","Data":"1906ed33fdaad9fa03888f00cc1a7811fe98a9f74c29e66aab1b64a489c00993"} Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.581387 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-8mqsj"] Jan 30 21:45:23 crc kubenswrapper[4721]: I0130 21:45:23.710041 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 30 21:45:24 crc kubenswrapper[4721]: I0130 21:45:24.102584 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a657244-7c25-4da8-9e58-484f521374f6" path="/var/lib/kubelet/pods/5a657244-7c25-4da8-9e58-484f521374f6/volumes" Jan 30 21:45:24 crc kubenswrapper[4721]: I0130 21:45:24.103077 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc00b8a9-5f34-4051-9c44-3e29e650027e" path="/var/lib/kubelet/pods/cc00b8a9-5f34-4051-9c44-3e29e650027e/volumes" Jan 30 21:45:24 crc kubenswrapper[4721]: I0130 21:45:24.425015 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"796cb9cb-aad7-4645-89ae-ae8764bfbe17","Type":"ContainerStarted","Data":"fa18aa6dab4a9613be474547cab05f65de9de184ce7f3cacc591db115bd12d30"} Jan 30 21:45:24 crc kubenswrapper[4721]: I0130 21:45:24.426503 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8mqsj" event={"ID":"b0f6d373-29bd-47a5-8cf5-3937fbc1498f","Type":"ContainerStarted","Data":"891dc8f6c6bd53b47dcd55d1a08aea8e4e582c82e30d65c540b5c1fee549979e"} Jan 30 21:45:24 crc kubenswrapper[4721]: I0130 21:45:24.428315 4721 generic.go:334] "Generic (PLEG): container finished" podID="d725937f-ee2d-431d-a0ed-94dd553cd014" containerID="229eb6ef07a4e86fefd23948eebf95b946fb84567533303c4f1bca3d2759366c" exitCode=0 Jan 30 21:45:24 crc kubenswrapper[4721]: I0130 21:45:24.428359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" event={"ID":"d725937f-ee2d-431d-a0ed-94dd553cd014","Type":"ContainerDied","Data":"229eb6ef07a4e86fefd23948eebf95b946fb84567533303c4f1bca3d2759366c"} Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.095506 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:45:28 crc kubenswrapper[4721]: E0130 21:45:28.100915 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.146882 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.237078 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d725937f-ee2d-431d-a0ed-94dd553cd014-config-volume\") pod \"d725937f-ee2d-431d-a0ed-94dd553cd014\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.237126 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d725937f-ee2d-431d-a0ed-94dd553cd014-secret-volume\") pod \"d725937f-ee2d-431d-a0ed-94dd553cd014\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.237154 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz92v\" (UniqueName: \"kubernetes.io/projected/d725937f-ee2d-431d-a0ed-94dd553cd014-kube-api-access-dz92v\") pod \"d725937f-ee2d-431d-a0ed-94dd553cd014\" (UID: \"d725937f-ee2d-431d-a0ed-94dd553cd014\") " Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.238187 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d725937f-ee2d-431d-a0ed-94dd553cd014-config-volume" (OuterVolumeSpecName: "config-volume") pod "d725937f-ee2d-431d-a0ed-94dd553cd014" (UID: "d725937f-ee2d-431d-a0ed-94dd553cd014"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.242760 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d725937f-ee2d-431d-a0ed-94dd553cd014-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d725937f-ee2d-431d-a0ed-94dd553cd014" (UID: "d725937f-ee2d-431d-a0ed-94dd553cd014"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.246668 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d725937f-ee2d-431d-a0ed-94dd553cd014-kube-api-access-dz92v" (OuterVolumeSpecName: "kube-api-access-dz92v") pod "d725937f-ee2d-431d-a0ed-94dd553cd014" (UID: "d725937f-ee2d-431d-a0ed-94dd553cd014"). InnerVolumeSpecName "kube-api-access-dz92v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.339573 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d725937f-ee2d-431d-a0ed-94dd553cd014-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.339603 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d725937f-ee2d-431d-a0ed-94dd553cd014-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.339613 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz92v\" (UniqueName: \"kubernetes.io/projected/d725937f-ee2d-431d-a0ed-94dd553cd014-kube-api-access-dz92v\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.467967 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" event={"ID":"d725937f-ee2d-431d-a0ed-94dd553cd014","Type":"ContainerDied","Data":"1aff202bae43238d52c979e758850674037f593ddc57d3e4296f5e362e4feeba"} Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.468046 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1aff202bae43238d52c979e758850674037f593ddc57d3e4296f5e362e4feeba" Jan 30 21:45:28 crc kubenswrapper[4721]: I0130 21:45:28.468061 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.495928 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b357dd79-4a4f-48c7-ba6f-058ca30785f5","Type":"ContainerStarted","Data":"4b5e1b9a82e3de2004b2855460b59a8a6d77f77c6ae46c4d49d8b5c214fc340c"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.496554 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.498101 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" event={"ID":"6e77f4bd-bf5d-4043-ae9e-e938a4e99b69","Type":"ContainerStarted","Data":"06781308941ac08420c0a7b8457243bec05d394beb3db3feea37b260c6bbeb67"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.498257 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.500468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" event={"ID":"f7847558-f6b6-4f0a-8fd3-45e0fef7fce4","Type":"ContainerStarted","Data":"8242aba97392b2aca093cf5c30734f39829a6bbd8f8b6abf81b97938cb826ff4"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.500573 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.502387 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3","Type":"ContainerStarted","Data":"94ea21b96c698e1511cad3d665b0760e590ea480a3dd03c6e4e785051d12764f"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.502507 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.507289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" event={"ID":"9d09968c-71f8-4bad-855b-ebc5abb78989","Type":"ContainerStarted","Data":"6c617aa1c8cdcc2118411d246451885be4b48159e2751388da486a053fb71160"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.508526 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.511789 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"ae53bce6-479b-4d55-9fb5-2441850bec4a","Type":"ContainerStarted","Data":"96db5a78f75527f749226cb504ea1117c6075a063f470c461928b32683be9c80"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.512710 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.518607 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.644962682 podStartE2EDuration="40.51858268s" podCreationTimestamp="2026-01-30 21:44:51 +0000 UTC" firstStartedPulling="2026-01-30 21:44:52.135522923 +0000 UTC m=+1680.927424169" 
lastFinishedPulling="2026-01-30 21:45:30.009142921 +0000 UTC m=+1718.801044167" observedRunningTime="2026-01-30 21:45:31.513028298 +0000 UTC m=+1720.304929544" watchObservedRunningTime="2026-01-30 21:45:31.51858268 +0000 UTC m=+1720.310483966" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.520709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" event={"ID":"3b976659-d481-4cd4-b1b1-72a7d465067d","Type":"ContainerStarted","Data":"5c1909fe3b798e468ae61f082b1ee7f440bcdde3a80291ea64740ab1ec8abad8"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.524050 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.527356 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"1a3aadfd-1e26-407f-98a8-c3f5681c2126","Type":"ContainerStarted","Data":"1c406ce378dd3e1ea135a9cf700c0b78c9e4c25eb851106e334c76336ed4222e"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.527524 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.535813 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" event={"ID":"a6518545-fdf0-4445-8e62-d7ca4816779d","Type":"ContainerStarted","Data":"1d5c4f083d87e83f7a4231239f0aa01f6245ddd4c7908c8fe64ad7cbcc7aaeec"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.536402 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.543106 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8mqsj" event={"ID":"b0f6d373-29bd-47a5-8cf5-3937fbc1498f","Type":"ContainerStarted","Data":"2dbbb5748b39cfa045b95c30d8f9282bad9ebf3768f5ce0ea590b403fc70df7b"} Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.547139 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.559788 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.567583 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" podStartSLOduration=23.177866792 podStartE2EDuration="31.567561336s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:21.659792865 +0000 UTC m=+1710.451694111" lastFinishedPulling="2026-01-30 21:45:30.049487399 +0000 UTC m=+1718.841388655" observedRunningTime="2026-01-30 21:45:31.564982506 +0000 UTC m=+1720.356883762" watchObservedRunningTime="2026-01-30 21:45:31.567561336 +0000 UTC m=+1720.359462622" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.570157 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=25.640119373 podStartE2EDuration="31.570144306s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:22.734631083 +0000 UTC m=+1711.526532339" 
lastFinishedPulling="2026-01-30 21:45:28.664656026 +0000 UTC m=+1717.456557272" observedRunningTime="2026-01-30 21:45:31.54023871 +0000 UTC m=+1720.332139976" watchObservedRunningTime="2026-01-30 21:45:31.570144306 +0000 UTC m=+1720.362045562" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.595104 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-2plvn" podStartSLOduration=24.600006999 podStartE2EDuration="31.595080367s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:21.671401754 +0000 UTC m=+1710.463303000" lastFinishedPulling="2026-01-30 21:45:28.666475122 +0000 UTC m=+1717.458376368" observedRunningTime="2026-01-30 21:45:31.586255964 +0000 UTC m=+1720.378157220" watchObservedRunningTime="2026-01-30 21:45:31.595080367 +0000 UTC m=+1720.386981623" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.618262 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" podStartSLOduration=23.236488066 podStartE2EDuration="31.618237264s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:21.6673795 +0000 UTC m=+1710.459280746" lastFinishedPulling="2026-01-30 21:45:30.049128678 +0000 UTC m=+1718.841029944" observedRunningTime="2026-01-30 21:45:31.603329383 +0000 UTC m=+1720.395230639" watchObservedRunningTime="2026-01-30 21:45:31.618237264 +0000 UTC m=+1720.410138520" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.631710 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=24.305533625 podStartE2EDuration="31.631691771s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:22.723034994 +0000 UTC m=+1711.514936290" lastFinishedPulling="2026-01-30 21:45:30.04919319 +0000 UTC m=+1718.841094436" observedRunningTime="2026-01-30 21:45:31.628923835 +0000 UTC m=+1720.420825091" watchObservedRunningTime="2026-01-30 21:45:31.631691771 +0000 UTC m=+1720.423593017" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.651288 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" podStartSLOduration=23.318767693 podStartE2EDuration="31.651266637s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:21.720727031 +0000 UTC m=+1710.512628277" lastFinishedPulling="2026-01-30 21:45:30.053225965 +0000 UTC m=+1718.845127221" observedRunningTime="2026-01-30 21:45:31.650003778 +0000 UTC m=+1720.441905024" watchObservedRunningTime="2026-01-30 21:45:31.651266637 +0000 UTC m=+1720.443167883" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.716087 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz" podStartSLOduration=23.336534323 podStartE2EDuration="31.716068092s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:21.669011331 +0000 UTC m=+1710.460912577" lastFinishedPulling="2026-01-30 21:45:30.0485451 +0000 UTC m=+1718.840446346" observedRunningTime="2026-01-30 21:45:31.6930473 +0000 UTC m=+1720.484948586" watchObservedRunningTime="2026-01-30 21:45:31.716068092 +0000 UTC m=+1720.507969348" Jan 30 21:45:31 crc kubenswrapper[4721]: I0130 21:45:31.716863 4721 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=24.385841711 podStartE2EDuration="31.716849787s" podCreationTimestamp="2026-01-30 21:45:00 +0000 UTC" firstStartedPulling="2026-01-30 21:45:22.71837832 +0000 UTC m=+1711.510279566" lastFinishedPulling="2026-01-30 21:45:30.049386396 +0000 UTC m=+1718.841287642" observedRunningTime="2026-01-30 21:45:31.712331806 +0000 UTC m=+1720.504233062" watchObservedRunningTime="2026-01-30 21:45:31.716849787 +0000 UTC m=+1720.508751083" Jan 30 21:45:32 crc kubenswrapper[4721]: I0130 21:45:32.558575 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"63a5898f-2b47-44bb-85a0-1700940899c1","Type":"ContainerStarted","Data":"42653d9c31888cbe8f2b2343df1ccf81a88fad7d6364efe0a0cacc1a87e3b104"} Jan 30 21:45:32 crc kubenswrapper[4721]: I0130 21:45:32.561435 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"796cb9cb-aad7-4645-89ae-ae8764bfbe17","Type":"ContainerStarted","Data":"9f956df0e9431a15a7c98b3795d6d1e27799dcbf213c1b32db994170555f51bc"} Jan 30 21:45:32 crc kubenswrapper[4721]: I0130 21:45:32.564432 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d483e4e0-6513-44ce-b601-359b9c2262ca","Type":"ContainerStarted","Data":"7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec"} Jan 30 21:45:32 crc kubenswrapper[4721]: I0130 21:45:32.566257 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4k958" event={"ID":"522b5333-a647-446e-a261-b1828a1d20a3","Type":"ContainerStarted","Data":"5a048e27408dced447c5a49020e4bd19a4ecb525dbdcbb8b4ecdb6d5d8c57fa8"} Jan 30 21:45:32 crc kubenswrapper[4721]: I0130 21:45:32.631026 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4k958" podStartSLOduration=28.304975108 podStartE2EDuration="36.631010421s" podCreationTimestamp="2026-01-30 21:44:56 +0000 UTC" firstStartedPulling="2026-01-30 21:45:21.722413484 +0000 UTC m=+1710.514314730" lastFinishedPulling="2026-01-30 21:45:30.048448797 +0000 UTC m=+1718.840350043" observedRunningTime="2026-01-30 21:45:32.62773545 +0000 UTC m=+1721.419636706" watchObservedRunningTime="2026-01-30 21:45:32.631010421 +0000 UTC m=+1721.422911667" Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.575964 4721 generic.go:334] "Generic (PLEG): container finished" podID="b0f6d373-29bd-47a5-8cf5-3937fbc1498f" containerID="2dbbb5748b39cfa045b95c30d8f9282bad9ebf3768f5ce0ea590b403fc70df7b" exitCode=0 Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.576077 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8mqsj" event={"ID":"b0f6d373-29bd-47a5-8cf5-3937fbc1498f","Type":"ContainerDied","Data":"2dbbb5748b39cfa045b95c30d8f9282bad9ebf3768f5ce0ea590b403fc70df7b"} Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.578419 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"96303720-27c1-495f-8597-5891c08c5e06","Type":"ContainerStarted","Data":"46c1c7ca105167dd830b7aef2fac6c00e1415c59c25795356af1eca9d7297048"} Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.580160 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" 
event={"ID":"87b1d1f9-cf16-401a-b55d-a6d2434e0284","Type":"ContainerStarted","Data":"e90b7f560b30d2f5b9d72265417f7f6f648e43d3a6f6b596d969360d0ee8a365"} Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.581510 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1f120802-4119-4ed8-bf74-62b1e4a534bc","Type":"ContainerStarted","Data":"c0b0b5623f5f35b50de663a7d761c3d99abef05cd90b85da9b628b2b5a7c2233"} Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.582940 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8bff96ff-2424-4622-8c4d-d866a4b28b21","Type":"ContainerStarted","Data":"9970c1e30a33dc45e1b95aa3422e2f7df5aad38d88f3bbb7018f26655938f520"} Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.584845 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7548d1d0-371f-4bf8-a557-a9734c49a52e","Type":"ContainerStarted","Data":"bed0008b0294b53d54f21b5699c599c6cf52ef8aabd411ddce920ac8fdf0fd22"} Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.585761 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4k958" Jan 30 21:45:33 crc kubenswrapper[4721]: I0130 21:45:33.696551 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.284022957 podStartE2EDuration="44.696529511s" podCreationTimestamp="2026-01-30 21:44:49 +0000 UTC" firstStartedPulling="2026-01-30 21:44:50.427470696 +0000 UTC m=+1679.219371932" lastFinishedPulling="2026-01-30 21:45:32.83997724 +0000 UTC m=+1721.631878486" observedRunningTime="2026-01-30 21:45:33.693980152 +0000 UTC m=+1722.485881398" watchObservedRunningTime="2026-01-30 21:45:33.696529511 +0000 UTC m=+1722.488430747" Jan 30 21:45:34 crc kubenswrapper[4721]: I0130 21:45:34.860458 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.613830 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8mqsj" event={"ID":"b0f6d373-29bd-47a5-8cf5-3937fbc1498f","Type":"ContainerStarted","Data":"6ad439f5b31550542e784ad850e751c6d21e42083695fca889d5651d5b694acc"} Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.617008 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"63a5898f-2b47-44bb-85a0-1700940899c1","Type":"ContainerStarted","Data":"d2328656add06c3e2ab24ea870252bdfb3931b6a29f6462e98e8afcfa1d13826"} Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.619527 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"796cb9cb-aad7-4645-89ae-ae8764bfbe17","Type":"ContainerStarted","Data":"3cd0191dbc1baa43fd9b5bec2446791743249485149544d5dfd13531e25f248e"} Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.622729 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerStarted","Data":"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc"} Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.624217 4721 generic.go:334] "Generic (PLEG): container finished" podID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerID="358a49fd238a707e916ecd12b4fd51b978a031b6e96ca50991e9602c19ce2b18" exitCode=0 Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 
21:45:36.624278 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" event={"ID":"a157c5e0-ff7c-4230-9762-55f2abe4df51","Type":"ContainerDied","Data":"358a49fd238a707e916ecd12b4fd51b978a031b6e96ca50991e9602c19ce2b18"} Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.626788 4721 generic.go:334] "Generic (PLEG): container finished" podID="2e533be2-6d15-4da5-aaba-332e873021a7" containerID="41b8e0dcc403da0588b0408f94fac470104e84c39825210c9205ef5412c2a4fe" exitCode=0 Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.626822 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" event={"ID":"2e533be2-6d15-4da5-aaba-332e873021a7","Type":"ContainerDied","Data":"41b8e0dcc403da0588b0408f94fac470104e84c39825210c9205ef5412c2a4fe"} Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.651547 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=28.899304341 podStartE2EDuration="41.651526683s" podCreationTimestamp="2026-01-30 21:44:55 +0000 UTC" firstStartedPulling="2026-01-30 21:45:23.264018668 +0000 UTC m=+1712.055919914" lastFinishedPulling="2026-01-30 21:45:36.016241 +0000 UTC m=+1724.808142256" observedRunningTime="2026-01-30 21:45:36.644577818 +0000 UTC m=+1725.436479064" watchObservedRunningTime="2026-01-30 21:45:36.651526683 +0000 UTC m=+1725.443427929" Jan 30 21:45:36 crc kubenswrapper[4721]: I0130 21:45:36.693941 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=27.425405876 podStartE2EDuration="39.693919515s" podCreationTimestamp="2026-01-30 21:44:57 +0000 UTC" firstStartedPulling="2026-01-30 21:45:23.80846594 +0000 UTC m=+1712.600367196" lastFinishedPulling="2026-01-30 21:45:36.076979589 +0000 UTC m=+1724.868880835" observedRunningTime="2026-01-30 21:45:36.682094599 +0000 UTC m=+1725.473995875" watchObservedRunningTime="2026-01-30 21:45:36.693919515 +0000 UTC m=+1725.485820771" Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.295087 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.635932 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.638431 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8mqsj" event={"ID":"b0f6d373-29bd-47a5-8cf5-3937fbc1498f","Type":"ContainerStarted","Data":"2d2b22ff2765051d337b66a7e7568000be5de7c701fa495b2c89e6e6d560886a"} Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.638681 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.638829 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-8mqsj" Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.641023 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" event={"ID":"a157c5e0-ff7c-4230-9762-55f2abe4df51","Type":"ContainerStarted","Data":"1f68385d9f1c4ca38b8358d88426662bc6c2b99a03fc4e76f719d2d178172aa6"} Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.641348 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 
Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.671027 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-8mqsj" podStartSLOduration=35.470658526 podStartE2EDuration="41.671003777s" podCreationTimestamp="2026-01-30 21:44:56 +0000 UTC" firstStartedPulling="2026-01-30 21:45:23.808878072 +0000 UTC m=+1712.600779318" lastFinishedPulling="2026-01-30 21:45:30.009223313 +0000 UTC m=+1718.801124569" observedRunningTime="2026-01-30 21:45:37.665161576 +0000 UTC m=+1726.457062822" watchObservedRunningTime="2026-01-30 21:45:37.671003777 +0000 UTC m=+1726.462905013"
Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.695902 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" podStartSLOduration=2.5852961089999997 podStartE2EDuration="52.695880827s" podCreationTimestamp="2026-01-30 21:44:45 +0000 UTC" firstStartedPulling="2026-01-30 21:44:46.117989393 +0000 UTC m=+1674.909890639" lastFinishedPulling="2026-01-30 21:45:36.228574101 +0000 UTC m=+1725.020475357" observedRunningTime="2026-01-30 21:45:37.68983 +0000 UTC m=+1726.481731256" watchObservedRunningTime="2026-01-30 21:45:37.695880827 +0000 UTC m=+1726.487782083"
Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.710946 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" podStartSLOduration=2.864192021 podStartE2EDuration="52.710924453s" podCreationTimestamp="2026-01-30 21:44:45 +0000 UTC" firstStartedPulling="2026-01-30 21:44:46.384415909 +0000 UTC m=+1675.176317155" lastFinishedPulling="2026-01-30 21:45:36.231148341 +0000 UTC m=+1725.023049587" observedRunningTime="2026-01-30 21:45:37.710597343 +0000 UTC m=+1726.502498639" watchObservedRunningTime="2026-01-30 21:45:37.710924453 +0000 UTC m=+1726.502825709"
Jan 30 21:45:37 crc kubenswrapper[4721]: I0130 21:45:37.720416 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.636098 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.679390 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.962288 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wwk6b"]
Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.995182 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bzkf8"]
Jan 30 21:45:38 crc kubenswrapper[4721]: E0130 21:45:38.995582 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d725937f-ee2d-431d-a0ed-94dd553cd014" containerName="collect-profiles"
Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.995599 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d725937f-ee2d-431d-a0ed-94dd553cd014" containerName="collect-profiles"
Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.995770 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d725937f-ee2d-431d-a0ed-94dd553cd014" containerName="collect-profiles"
podUID="d725937f-ee2d-431d-a0ed-94dd553cd014" containerName="collect-profiles" Jan 30 21:45:38 crc kubenswrapper[4721]: I0130 21:45:38.996709 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.000684 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.024838 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bzkf8"] Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.126140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twn4w\" (UniqueName: \"kubernetes.io/projected/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-kube-api-access-twn4w\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.126223 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-config\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.126492 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.126521 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.126833 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-fmzsr"] Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.128372 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.134171 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.146103 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-fmzsr"]
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228280 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133e2a95-0b74-4b44-9ea1-d6a37d548876-combined-ca-bundle\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228420 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/133e2a95-0b74-4b44-9ea1-d6a37d548876-ovn-rundir\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228559 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/133e2a95-0b74-4b44-9ea1-d6a37d548876-ovs-rundir\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twn4w\" (UniqueName: \"kubernetes.io/projected/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-kube-api-access-twn4w\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228633 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcrj8\" (UniqueName: \"kubernetes.io/projected/133e2a95-0b74-4b44-9ea1-d6a37d548876-kube-api-access-bcrj8\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228660 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/133e2a95-0b74-4b44-9ea1-d6a37d548876-config\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228728 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-config\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228775 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/133e2a95-0b74-4b44-9ea1-d6a37d548876-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
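
Each volume above marches through the same reconciler phases in order: a VerifyControllerAttachedVolume record (reconciler_common.go:245), a MountVolume started record (reconciler_common.go:218), then a MountVolume.SetUp succeeded record (operation_generator.go:637), or a SetUp failure that is retried with backoff, as happens to etc-swift further down. A simplified model of that per-volume loop, illustrative only and not kubelet's actual code:

    package main

    import "fmt"

    // vol carries the two fields the reconciler logs for every volume.
    type vol struct {
        name, pod string
    }

    // reconcile sketches the three phases visible in the log: attach
    // verification, mount start, and SetUp success or failure.
    func reconcile(vols []vol, setUp func(vol) error) {
        for _, v := range vols {
            fmt.Printf("VerifyControllerAttachedVolume started for volume %q pod %q\n", v.name, v.pod)
            fmt.Printf("MountVolume started for volume %q pod %q\n", v.name, v.pod)
            if err := setUp(v); err != nil {
                fmt.Printf("MountVolume.SetUp failed for volume %q: %v\n", v.name, err)
                continue // retried later with backoff by the operation executor
            }
            fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.name, v.pod)
        }
    }

    func main() {
        vols := []vol{{"config", "ovn-controller-metrics-fmzsr"}, {"metrics-certs-tls-certs", "ovn-controller-metrics-fmzsr"}}
        reconcile(vols, func(vol) error { return nil })
    }
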
\"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228835 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.228871 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.229940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.229981 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.230589 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-config\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.250516 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twn4w\" (UniqueName: \"kubernetes.io/projected/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-kube-api-access-twn4w\") pod \"dnsmasq-dns-7f896c8c65-bzkf8\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.295912 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330144 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133e2a95-0b74-4b44-9ea1-d6a37d548876-combined-ca-bundle\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330207 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/133e2a95-0b74-4b44-9ea1-d6a37d548876-ovn-rundir\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330549 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/133e2a95-0b74-4b44-9ea1-d6a37d548876-ovn-rundir\") pod 
\"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330641 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/133e2a95-0b74-4b44-9ea1-d6a37d548876-ovs-rundir\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330709 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/133e2a95-0b74-4b44-9ea1-d6a37d548876-ovs-rundir\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcrj8\" (UniqueName: \"kubernetes.io/projected/133e2a95-0b74-4b44-9ea1-d6a37d548876-kube-api-access-bcrj8\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.330869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/133e2a95-0b74-4b44-9ea1-d6a37d548876-config\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.331272 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/133e2a95-0b74-4b44-9ea1-d6a37d548876-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.331770 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/133e2a95-0b74-4b44-9ea1-d6a37d548876-config\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.333694 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.335617 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133e2a95-0b74-4b44-9ea1-d6a37d548876-combined-ca-bundle\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.355208 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/133e2a95-0b74-4b44-9ea1-d6a37d548876-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.355473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcrj8\" (UniqueName: \"kubernetes.io/projected/133e2a95-0b74-4b44-9ea1-d6a37d548876-kube-api-access-bcrj8\") pod \"ovn-controller-metrics-fmzsr\" (UID: \"133e2a95-0b74-4b44-9ea1-d6a37d548876\") " pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.362500 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.466190 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-fmzsr"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.486864 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pwbw8"]
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.487257 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.487217 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" containerName="dnsmasq-dns" containerID="cri-o://f6c9e63b2ee0a71343b69a22d1c8d9584cac7abf0c180d4fa51019e895c597aa" gracePeriod=10
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.543579 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-2d554"]
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.545008 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.549550 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.571197 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-2d554"]
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.656821 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdfz8\" (UniqueName: \"kubernetes.io/projected/7ef41c62-9890-463d-8888-539c1ab07cec-kube-api-access-zdfz8\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.657418 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.657486 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.657556 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.657590 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.677915 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerName="dnsmasq-dns" containerID="cri-o://1f68385d9f1c4ca38b8358d88426662bc6c2b99a03fc4e76f719d2d178172aa6" gracePeriod=10
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.739330 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.764030 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdfz8\" (UniqueName: \"kubernetes.io/projected/7ef41c62-9890-463d-8888-539c1ab07cec-kube-api-access-zdfz8\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.764098 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554"
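
Both outgoing dnsmasq pods above are stopped with gracePeriod=10: the runtime (cri-o here) delivers SIGTERM and escalates to SIGKILL only if the container is still running when the grace window lapses. A rough model of those semantics, with an OS process handle standing in for the container; kubelet itself delegates the kill to the CRI runtime:

    package main

    import (
        "os"
        "syscall"
        "time"
    )

    // killWithGrace models "Killing container with a grace period": TERM
    // first, then KILL once the grace window expires.
    func killWithGrace(p *os.Process, grace time.Duration) {
        _ = p.Signal(syscall.SIGTERM) // polite stop first
        done := make(chan struct{})
        go func() { _, _ = p.Wait(); close(done) }()
        select {
        case <-done: // exited inside the grace window
        case <-time.After(grace):
            _ = p.Kill() // grace period over: SIGKILL
        }
    }

    func main() {
        // Illustrative target: a sleeping child process stands in for the container.
        p, err := os.StartProcess("/bin/sleep", []string{"sleep", "60"}, &os.ProcAttr{})
        if err == nil {
            killWithGrace(p, 10*time.Second) // gracePeriod=10, as in the records above
        }
    }
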
\"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.764163 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.765280 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.765772 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.765831 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.765835 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.766551 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.767078 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.800837 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdfz8\" (UniqueName: \"kubernetes.io/projected/7ef41c62-9890-463d-8888-539c1ab07cec-kube-api-access-zdfz8\") pod \"dnsmasq-dns-86db49b7ff-2d554\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.859512 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.917105 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:45:39 crc kubenswrapper[4721]: I0130 21:45:39.974234 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bzkf8"]
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.125689 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-fmzsr"]
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.138597 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.163616 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.168957 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.169069 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.170188 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.204228 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-4ldtp"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.221843 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.327894 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.328262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gbnh\" (UniqueName: \"kubernetes.io/projected/cbab3069-54ee-4146-b912-5e59c0039f86-kube-api-access-7gbnh\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.328391 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cbab3069-54ee-4146-b912-5e59c0039f86-scripts\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.328456 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.328493 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbab3069-54ee-4146-b912-5e59c0039f86-config\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.328610 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.328654 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430086 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430216 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430249 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gbnh\" (UniqueName: \"kubernetes.io/projected/cbab3069-54ee-4146-b912-5e59c0039f86-kube-api-access-7gbnh\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cbab3069-54ee-4146-b912-5e59c0039f86-scripts\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430331 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.430358 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbab3069-54ee-4146-b912-5e59c0039f86-config\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.431164 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbab3069-54ee-4146-b912-5e59c0039f86-config\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.431967 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0"
"MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0" Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.432460 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cbab3069-54ee-4146-b912-5e59c0039f86-scripts\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0" Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.434794 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0" Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.434913 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0" Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.450503 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbab3069-54ee-4146-b912-5e59c0039f86-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0" Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.461359 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gbnh\" (UniqueName: \"kubernetes.io/projected/cbab3069-54ee-4146-b912-5e59c0039f86-kube-api-access-7gbnh\") pod \"ovn-northd-0\" (UID: \"cbab3069-54ee-4146-b912-5e59c0039f86\") " pod="openstack/ovn-northd-0" Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.532965 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.687233 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bff96ff-2424-4622-8c4d-d866a4b28b21" containerID="9970c1e30a33dc45e1b95aa3422e2f7df5aad38d88f3bbb7018f26655938f520" exitCode=0
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.687336 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8bff96ff-2424-4622-8c4d-d866a4b28b21","Type":"ContainerDied","Data":"9970c1e30a33dc45e1b95aa3422e2f7df5aad38d88f3bbb7018f26655938f520"}
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.690168 4721 generic.go:334] "Generic (PLEG): container finished" podID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerID="1f68385d9f1c4ca38b8358d88426662bc6c2b99a03fc4e76f719d2d178172aa6" exitCode=0
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.690261 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" event={"ID":"a157c5e0-ff7c-4230-9762-55f2abe4df51","Type":"ContainerDied","Data":"1f68385d9f1c4ca38b8358d88426662bc6c2b99a03fc4e76f719d2d178172aa6"}
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.692153 4721 generic.go:334] "Generic (PLEG): container finished" podID="2e533be2-6d15-4da5-aaba-332e873021a7" containerID="f6c9e63b2ee0a71343b69a22d1c8d9584cac7abf0c180d4fa51019e895c597aa" exitCode=0
Jan 30 21:45:40 crc kubenswrapper[4721]: I0130 21:45:40.692387 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" event={"ID":"2e533be2-6d15-4da5-aaba-332e873021a7","Type":"ContainerDied","Data":"f6c9e63b2ee0a71343b69a22d1c8d9584cac7abf0c180d4fa51019e895c597aa"}
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.504772 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.533017 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bzkf8"]
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.584031 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-msqq2"]
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.585923 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-msqq2"
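
The generic.go:334 "container finished" lines and the ContainerDied events they precede both come out of the PLEG relist: kubelet periodically lists runtime state, diffs it against the previous relist, and feeds one event per transition into the sync loop. A simplified model of that diff, not the actual PLEG implementation, with a container ID truncated for brevity:

    package main

    import "fmt"

    // plegEvent mirrors the event payload in the "SyncLoop (PLEG): event
    // for pod" records above.
    type plegEvent struct {
        ID   string // pod UID
        Type string // "ContainerStarted" or "ContainerDied"
        Data string // container ID
    }

    // relistDiff compares per-container running state between two relists
    // and emits one event per transition.
    func relistDiff(podID string, old, cur map[string]bool) []plegEvent {
        var events []plegEvent
        for id, running := range cur {
            if running && !old[id] {
                events = append(events, plegEvent{podID, "ContainerStarted", id})
            }
        }
        for id, running := range old {
            if running && !cur[id] {
                events = append(events, plegEvent{podID, "ContainerDied", id})
            }
        }
        return events
    }

    func main() {
        old := map[string]bool{"1f68385d9f1c": true}
        cur := map[string]bool{"1f68385d9f1c": false}
        fmt.Println(relistDiff("a157c5e0-ff7c-4230-9762-55f2abe4df51", old, cur)) // one ContainerDied event
    }
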
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.596109 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-msqq2"]
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.701989 4721 generic.go:334] "Generic (PLEG): container finished" podID="87b1d1f9-cf16-401a-b55d-a6d2434e0284" containerID="e90b7f560b30d2f5b9d72265417f7f6f648e43d3a6f6b596d969360d0ee8a365" exitCode=0
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.702076 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87b1d1f9-cf16-401a-b55d-a6d2434e0284","Type":"ContainerDied","Data":"e90b7f560b30d2f5b9d72265417f7f6f648e43d3a6f6b596d969360d0ee8a365"}
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.755123 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-475qv\" (UniqueName: \"kubernetes.io/projected/d9fe7811-2c66-433e-9173-a670957604bc-kube-api-access-475qv\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.755238 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-dns-svc\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.755271 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.755315 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-config\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.755345 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.857090 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-475qv\" (UniqueName: \"kubernetes.io/projected/d9fe7811-2c66-433e-9173-a670957604bc-kube-api-access-475qv\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.857272 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-dns-svc\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2"
\"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.857337 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.857380 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-config\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.857435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.858205 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-dns-svc\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.859046 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.859233 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.859455 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-config\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.879251 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-475qv\" (UniqueName: \"kubernetes.io/projected/d9fe7811-2c66-433e-9173-a670957604bc-kube-api-access-475qv\") pod \"dnsmasq-dns-698758b865-msqq2\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:41 crc kubenswrapper[4721]: I0130 21:45:41.925482 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.099835 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"
Jan 30 21:45:42 crc kubenswrapper[4721]: E0130 21:45:42.100148 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.640835 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.646632 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.649040 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.649112 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.649542 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.649698 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-49mf2"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.674518 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.725137 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-fmzsr" event={"ID":"133e2a95-0b74-4b44-9ea1-d6a37d548876","Type":"ContainerStarted","Data":"c54a8699625140c535bb3ed7ce7bb5679a01cea7780c36a7522aba73b4430148"}
Jan 30 21:45:42 crc kubenswrapper[4721]: W0130 21:45:42.746058 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd8f88ac_09d8_4a41_bb6c_e20ce3fd59ca.slice/crio-55c576a66c5a0c3a7867093f5c95b2ba59fee5deedc7f3d7702e7e15c9b40296 WatchSource:0}: Error finding container 55c576a66c5a0c3a7867093f5c95b2ba59fee5deedc7f3d7702e7e15c9b40296: Status 404 returned error can't find the container with id 55c576a66c5a0c3a7867093f5c95b2ba59fee5deedc7f3d7702e7e15c9b40296
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.774529 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fbc194de-ea06-4d56-a35a-4b63a46651df-cache\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.774887 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.775096 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fbc194de-ea06-4d56-a35a-4b63a46651df-lock\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
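
machine-config-daemon above is pinned at the ceiling of the container restart backoff: "back-off 5m0s restarting failed container". Kubelet doubles the restart delay for a crash-looping container up to a five-minute cap; the 10s starting point below is the commonly cited default and is stated as an assumption, not something this log shows:

    package main

    import (
        "fmt"
        "time"
    )

    // restartDelays sketches the doubling backoff behind CrashLoopBackOff:
    // each crash doubles the delay until it pins at the cap.
    func restartDelays(restarts int) []time.Duration {
        const initial, maxDelay = 10 * time.Second, 5 * time.Minute // assumed defaults
        delays := make([]time.Duration, 0, restarts)
        d := initial
        for i := 0; i < restarts; i++ {
            delays = append(delays, d)
            if d *= 2; d > maxDelay {
                d = maxDelay
            }
        }
        return delays
    }

    func main() {
        fmt.Println(restartDelays(7)) // [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]
    }
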
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.775167 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.775214 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fbbb\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-kube-api-access-7fbbb\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.775254 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc194de-ea06-4d56-a35a-4b63a46651df-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.884581 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fbc194de-ea06-4d56-a35a-4b63a46651df-lock\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.884666 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.884713 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fbbb\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-kube-api-access-7fbbb\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.884773 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc194de-ea06-4d56-a35a-4b63a46651df-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.884823 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fbc194de-ea06-4d56-a35a-4b63a46651df-cache\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.884892 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0" Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.885041 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fbc194de-ea06-4d56-a35a-4b63a46651df-lock\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0" Jan 30 21:45:42 crc kubenswrapper[4721]: E0130 21:45:42.885404 4721 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 21:45:42 crc kubenswrapper[4721]: E0130 21:45:42.885435 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 21:45:42 crc kubenswrapper[4721]: E0130 21:45:42.885470 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift podName:fbc194de-ea06-4d56-a35a-4b63a46651df nodeName:}" failed. No retries permitted until 2026-01-30 21:45:43.385455992 +0000 UTC m=+1732.177357238 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift") pod "swift-storage-0" (UID: "fbc194de-ea06-4d56-a35a-4b63a46651df") : configmap "swift-ring-files" not found Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.885723 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fbc194de-ea06-4d56-a35a-4b63a46651df-cache\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0" Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.899569 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.899778 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c038f27538ebee87efbb3015411413b5374f5dd71962c463dcbfd584be792f04/globalmount\"" pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.906619 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc194de-ea06-4d56-a35a-4b63a46651df-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.916696 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fbbb\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-kube-api-access-7fbbb\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:42 crc kubenswrapper[4721]: I0130 21:45:42.979248 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0cf410f-ad06-4bf8-b92e-7482554a11c0\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.364639 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-msqq2"]
Jan 30 21:45:43 crc kubenswrapper[4721]: W0130 21:45:43.368451 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9fe7811_2c66_433e_9173_a670957604bc.slice/crio-0967ed73e0df7213d9d02726d55c62876ba094a2d77367e276c04bcf49acc5bb WatchSource:0}: Error finding container 0967ed73e0df7213d9d02726d55c62876ba094a2d77367e276c04bcf49acc5bb: Status 404 returned error can't find the container with id 0967ed73e0df7213d9d02726d55c62876ba094a2d77367e276c04bcf49acc5bb
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.396069 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:43 crc kubenswrapper[4721]: E0130 21:45:43.396416 4721 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 30 21:45:43 crc kubenswrapper[4721]: E0130 21:45:43.396646 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Jan 30 21:45:43 crc kubenswrapper[4721]: E0130 21:45:43.396740 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift podName:fbc194de-ea06-4d56-a35a-4b63a46651df nodeName:}" failed. No retries permitted until 2026-01-30 21:45:44.396723557 +0000 UTC m=+1733.188624803 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift") pod "swift-storage-0" (UID: "fbc194de-ea06-4d56-a35a-4b63a46651df") : configmap "swift-ring-files" not found
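
The etc-swift mount cannot complete until the swift-ring-files ConfigMap actually exists (the reflector has a watch for it, but the object has not been created yet), and nestedpendingoperations spaces the retries with a doubling delay that can be read straight off the log: durationBeforeRetry 500ms above, 1s here, 2s below. A sketch of that progression; the cap is an assumption, not shown in this log:

    package main

    import (
        "fmt"
        "time"
    )

    // nextRetry reproduces the durationBeforeRetry progression seen for the
    // etc-swift mount: 500ms -> 1s -> 2s, doubling per failure.
    func nextRetry(prev time.Duration) time.Duration {
        const initial, maxDelay = 500 * time.Millisecond, 2 * time.Minute // cap assumed
        if prev == 0 {
            return initial
        }
        if next := prev * 2; next < maxDelay {
            return next
        }
        return maxDelay
    }

    func main() {
        d := time.Duration(0)
        for i := 0; i < 4; i++ {
            d = nextRetry(d)
            fmt.Println(d) // 500ms, 1s, 2s, then 4s if a fourth attempt were needed
        }
    }
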
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.469047 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 30 21:45:43 crc kubenswrapper[4721]: W0130 21:45:43.473532 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbab3069_54ee_4146_b912_5e59c0039f86.slice/crio-a45170729bd0d3b77dd58b39ce1eb6a73ef8d637c7741daed48c5e09dd1b44a9 WatchSource:0}: Error finding container a45170729bd0d3b77dd58b39ce1eb6a73ef8d637c7741daed48c5e09dd1b44a9: Status 404 returned error can't find the container with id a45170729bd0d3b77dd58b39ce1eb6a73ef8d637c7741daed48c5e09dd1b44a9
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.481924 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-2d554"]
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.735572 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cbab3069-54ee-4146-b912-5e59c0039f86","Type":"ContainerStarted","Data":"a45170729bd0d3b77dd58b39ce1eb6a73ef8d637c7741daed48c5e09dd1b44a9"}
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.739151 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-msqq2" event={"ID":"d9fe7811-2c66-433e-9173-a670957604bc","Type":"ContainerStarted","Data":"0967ed73e0df7213d9d02726d55c62876ba094a2d77367e276c04bcf49acc5bb"}
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.740880 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" event={"ID":"7ef41c62-9890-463d-8888-539c1ab07cec","Type":"ContainerStarted","Data":"773b0a8cb7429a7fa7eb975d9a86c3abfa4cd0c8b22e385398be71c1675b474c"}
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.744548 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" event={"ID":"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca","Type":"ContainerStarted","Data":"55c576a66c5a0c3a7867093f5c95b2ba59fee5deedc7f3d7702e7e15c9b40296"}
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.747730 4721 generic.go:334] "Generic (PLEG): container finished" podID="96303720-27c1-495f-8597-5891c08c5e06" containerID="46c1c7ca105167dd830b7aef2fac6c00e1415c59c25795356af1eca9d7297048" exitCode=0
Jan 30 21:45:43 crc kubenswrapper[4721]: I0130 21:45:43.747792 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"96303720-27c1-495f-8597-5891c08c5e06","Type":"ContainerDied","Data":"46c1c7ca105167dd830b7aef2fac6c00e1415c59c25795356af1eca9d7297048"}
Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.418751 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:45:44 crc kubenswrapper[4721]: E0130 21:45:44.418884 4721 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 30 21:45:44 crc kubenswrapper[4721]: E0130 21:45:44.419648 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 21:45:44 crc kubenswrapper[4721]: E0130 21:45:44.419707 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift podName:fbc194de-ea06-4d56-a35a-4b63a46651df nodeName:}" failed. No retries permitted until 2026-01-30 21:45:46.419687379 +0000 UTC m=+1735.211588615 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift") pod "swift-storage-0" (UID: "fbc194de-ea06-4d56-a35a-4b63a46651df") : configmap "swift-ring-files" not found Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.440720 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.450207 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.522362 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-dns-svc\") pod \"a157c5e0-ff7c-4230-9762-55f2abe4df51\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.522548 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-config\") pod \"2e533be2-6d15-4da5-aaba-332e873021a7\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.522577 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-config\") pod \"a157c5e0-ff7c-4230-9762-55f2abe4df51\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.522635 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw5kf\" (UniqueName: \"kubernetes.io/projected/a157c5e0-ff7c-4230-9762-55f2abe4df51-kube-api-access-xw5kf\") pod \"a157c5e0-ff7c-4230-9762-55f2abe4df51\" (UID: \"a157c5e0-ff7c-4230-9762-55f2abe4df51\") " Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.522681 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-dns-svc\") pod \"2e533be2-6d15-4da5-aaba-332e873021a7\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.522764 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkw48\" (UniqueName: \"kubernetes.io/projected/2e533be2-6d15-4da5-aaba-332e873021a7-kube-api-access-hkw48\") pod \"2e533be2-6d15-4da5-aaba-332e873021a7\" (UID: \"2e533be2-6d15-4da5-aaba-332e873021a7\") " Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.554651 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a157c5e0-ff7c-4230-9762-55f2abe4df51-kube-api-access-xw5kf" (OuterVolumeSpecName: "kube-api-access-xw5kf") pod "a157c5e0-ff7c-4230-9762-55f2abe4df51" (UID: 
"a157c5e0-ff7c-4230-9762-55f2abe4df51"). InnerVolumeSpecName "kube-api-access-xw5kf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.555807 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e533be2-6d15-4da5-aaba-332e873021a7-kube-api-access-hkw48" (OuterVolumeSpecName: "kube-api-access-hkw48") pod "2e533be2-6d15-4da5-aaba-332e873021a7" (UID: "2e533be2-6d15-4da5-aaba-332e873021a7"). InnerVolumeSpecName "kube-api-access-hkw48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.641718 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a157c5e0-ff7c-4230-9762-55f2abe4df51" (UID: "a157c5e0-ff7c-4230-9762-55f2abe4df51"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.644427 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw5kf\" (UniqueName: \"kubernetes.io/projected/a157c5e0-ff7c-4230-9762-55f2abe4df51-kube-api-access-xw5kf\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.644461 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkw48\" (UniqueName: \"kubernetes.io/projected/2e533be2-6d15-4da5-aaba-332e873021a7-kube-api-access-hkw48\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.644474 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.692975 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-config" (OuterVolumeSpecName: "config") pod "a157c5e0-ff7c-4230-9762-55f2abe4df51" (UID: "a157c5e0-ff7c-4230-9762-55f2abe4df51"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.710249 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2e533be2-6d15-4da5-aaba-332e873021a7" (UID: "2e533be2-6d15-4da5-aaba-332e873021a7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.718546 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-config" (OuterVolumeSpecName: "config") pod "2e533be2-6d15-4da5-aaba-332e873021a7" (UID: "2e533be2-6d15-4da5-aaba-332e873021a7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.745702 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.745744 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a157c5e0-ff7c-4230-9762-55f2abe4df51-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.745758 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e533be2-6d15-4da5-aaba-332e873021a7-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.760267 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" event={"ID":"2e533be2-6d15-4da5-aaba-332e873021a7","Type":"ContainerDied","Data":"90d8bef3921161a3fe7a181c39bba6f693aef2b4e2f727717b73a9cca21a5084"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.760355 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-pwbw8" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.760628 4721 scope.go:117] "RemoveContainer" containerID="f6c9e63b2ee0a71343b69a22d1c8d9584cac7abf0c180d4fa51019e895c597aa" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.761822 4721 generic.go:334] "Generic (PLEG): container finished" podID="d9fe7811-2c66-433e-9173-a670957604bc" containerID="50b2e3fdcd8aae947dc91d4ba748ece22d316a74431b6d5f60d179b765ea4d3b" exitCode=0 Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.761900 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-msqq2" event={"ID":"d9fe7811-2c66-433e-9173-a670957604bc","Type":"ContainerDied","Data":"50b2e3fdcd8aae947dc91d4ba748ece22d316a74431b6d5f60d179b765ea4d3b"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.764438 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8bff96ff-2424-4622-8c4d-d866a4b28b21","Type":"ContainerStarted","Data":"4a253e1143a290f1aa5d7cefcf68549c66a5eae528f33b25e96e99bca3d678f9"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.770721 4721 generic.go:334] "Generic (PLEG): container finished" podID="dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" containerID="90734e2c9ff4fd1f3109e091e3f5e13777bfaaa055822f10ad532ee5564e9445" exitCode=0 Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.770801 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" event={"ID":"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca","Type":"ContainerDied","Data":"90734e2c9ff4fd1f3109e091e3f5e13777bfaaa055822f10ad532ee5564e9445"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.773003 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"96303720-27c1-495f-8597-5891c08c5e06","Type":"ContainerStarted","Data":"e5076b2c0ca744e59e99cb15ac60f36ef5e68c40411cdfda83aee9a006a50b03"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.776596 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-fmzsr" 
event={"ID":"133e2a95-0b74-4b44-9ea1-d6a37d548876","Type":"ContainerStarted","Data":"64963724ac9a9e9a56726e9edceb0c65e7a0a56f22ba97328fc5a484b40de418"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.783889 4721 generic.go:334] "Generic (PLEG): container finished" podID="7ef41c62-9890-463d-8888-539c1ab07cec" containerID="70391a91f3d63e3405389cb80334e59209a71f8dcdf180c9189cf156d77755b2" exitCode=0 Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.783982 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" event={"ID":"7ef41c62-9890-463d-8888-539c1ab07cec","Type":"ContainerDied","Data":"70391a91f3d63e3405389cb80334e59209a71f8dcdf180c9189cf156d77755b2"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.790450 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerDied","Data":"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.790387 4721 generic.go:334] "Generic (PLEG): container finished" podID="8306e740-fd1d-459e-a0db-fc01a639f991" containerID="d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc" exitCode=0 Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.816130 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" event={"ID":"a157c5e0-ff7c-4230-9762-55f2abe4df51","Type":"ContainerDied","Data":"0084074702c3e4bec933c31d3fa0058d796f92881474cf33ba2aadc333df6ea3"} Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.816234 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wwk6b" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.832465 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371980.02234 podStartE2EDuration="56.832434835s" podCreationTimestamp="2026-01-30 21:44:48 +0000 UTC" firstStartedPulling="2026-01-30 21:44:50.321226528 +0000 UTC m=+1679.113127774" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:45:44.828175012 +0000 UTC m=+1733.620076258" watchObservedRunningTime="2026-01-30 21:45:44.832434835 +0000 UTC m=+1733.624336081" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.851417 4721 scope.go:117] "RemoveContainer" containerID="41b8e0dcc403da0588b0408f94fac470104e84c39825210c9205ef5412c2a4fe" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.899514 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-fmzsr" podStartSLOduration=5.89949968 podStartE2EDuration="5.89949968s" podCreationTimestamp="2026-01-30 21:45:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:45:44.899259653 +0000 UTC m=+1733.691160899" watchObservedRunningTime="2026-01-30 21:45:44.89949968 +0000 UTC m=+1733.691400926" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.929937 4721 scope.go:117] "RemoveContainer" containerID="1f68385d9f1c4ca38b8358d88426662bc6c2b99a03fc4e76f719d2d178172aa6" Jan 30 21:45:44 crc kubenswrapper[4721]: I0130 21:45:44.951849 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=16.110126453 podStartE2EDuration="58.95182802s" 
podCreationTimestamp="2026-01-30 21:44:46 +0000 UTC" firstStartedPulling="2026-01-30 21:44:49.073360725 +0000 UTC m=+1677.865261971" lastFinishedPulling="2026-01-30 21:45:31.915062292 +0000 UTC m=+1720.706963538" observedRunningTime="2026-01-30 21:45:44.949666543 +0000 UTC m=+1733.741567789" watchObservedRunningTime="2026-01-30 21:45:44.95182802 +0000 UTC m=+1733.743729266" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.007198 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pwbw8"] Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.023783 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-pwbw8"] Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.042227 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wwk6b"] Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.048574 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wwk6b"] Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.088819 4721 scope.go:117] "RemoveContainer" containerID="358a49fd238a707e916ecd12b4fd51b978a031b6e96ca50991e9602c19ce2b18" Jan 30 21:45:45 crc kubenswrapper[4721]: E0130 21:45:45.276553 4721 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Jan 30 21:45:45 crc kubenswrapper[4721]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/7ef41c62-9890-463d-8888-539c1ab07cec/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 30 21:45:45 crc kubenswrapper[4721]: > podSandboxID="773b0a8cb7429a7fa7eb975d9a86c3abfa4cd0c8b22e385398be71c1675b474c" Jan 30 21:45:45 crc kubenswrapper[4721]: E0130 21:45:45.276759 4721 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 30 21:45:45 crc kubenswrapper[4721]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n599h5cbh7ch5d4h66fh676hdbh546h95h88h5ffh55ch7fhch57ch687hddhc7h5fdh57dh674h56fh64ch98h9bh557h55dh646h54ch54fh5c4h597q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zdfz8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86db49b7ff-2d554_openstack(7ef41c62-9890-463d-8888-539c1ab07cec): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/7ef41c62-9890-463d-8888-539c1ab07cec/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 30 21:45:45 crc kubenswrapper[4721]: > logger="UnhandledError" Jan 30 21:45:45 crc kubenswrapper[4721]: E0130 21:45:45.277949 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/7ef41c62-9890-463d-8888-539c1ab07cec/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.331591 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.369582 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-ovsdbserver-sb\") pod \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.369779 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-config\") pod \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.369822 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twn4w\" (UniqueName: \"kubernetes.io/projected/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-kube-api-access-twn4w\") pod \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.369878 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-dns-svc\") pod \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\" (UID: \"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca\") " Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.375211 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-kube-api-access-twn4w" (OuterVolumeSpecName: "kube-api-access-twn4w") pod "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" (UID: "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca"). InnerVolumeSpecName "kube-api-access-twn4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.401624 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" (UID: "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.402355 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-config" (OuterVolumeSpecName: "config") pod "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" (UID: "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.402355 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" (UID: "dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.472664 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.472728 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.472744 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twn4w\" (UniqueName: \"kubernetes.io/projected/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-kube-api-access-twn4w\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.472759 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:45:45 crc kubenswrapper[4721]: E0130 21:45:45.678364 4721 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.20:37256->38.102.83.20:38213: write tcp 38.102.83.20:37256->38.102.83.20:38213: write: broken pipe Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.826623 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-msqq2" event={"ID":"d9fe7811-2c66-433e-9173-a670957604bc","Type":"ContainerStarted","Data":"2d7d34b57b2e76e29321e225d0442c7d40d4a8bd2e4d1d8d986b6e36f79e50c6"} Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.827452 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.833673 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.835420 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-bzkf8" event={"ID":"dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca","Type":"ContainerDied","Data":"55c576a66c5a0c3a7867093f5c95b2ba59fee5deedc7f3d7702e7e15c9b40296"} Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.835470 4721 scope.go:117] "RemoveContainer" containerID="90734e2c9ff4fd1f3109e091e3f5e13777bfaaa055822f10ad532ee5564e9445" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.853605 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-msqq2" podStartSLOduration=4.853583861 podStartE2EDuration="4.853583861s" podCreationTimestamp="2026-01-30 21:45:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:45:45.84645051 +0000 UTC m=+1734.638351756" watchObservedRunningTime="2026-01-30 21:45:45.853583861 +0000 UTC m=+1734.645485107" Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.938105 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bzkf8"] Jan 30 21:45:45 crc kubenswrapper[4721]: I0130 21:45:45.946780 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bzkf8"] Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.124327 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" path="/var/lib/kubelet/pods/2e533be2-6d15-4da5-aaba-332e873021a7/volumes" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.125104 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" path="/var/lib/kubelet/pods/a157c5e0-ff7c-4230-9762-55f2abe4df51/volumes" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.125843 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" path="/var/lib/kubelet/pods/dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca/volumes" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.490470 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0" Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.490715 4721 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.490739 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.490796 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift podName:fbc194de-ea06-4d56-a35a-4b63a46651df nodeName:}" failed. No retries permitted until 2026-01-30 21:45:50.490778893 +0000 UTC m=+1739.282680139 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift") pod "swift-storage-0" (UID: "fbc194de-ea06-4d56-a35a-4b63a46651df") : configmap "swift-ring-files" not found Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.588728 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-hkfr7"] Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.589174 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589193 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.589210 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerName="dnsmasq-dns" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589219 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerName="dnsmasq-dns" Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.589236 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" containerName="dnsmasq-dns" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589245 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" containerName="dnsmasq-dns" Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.589269 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589276 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: E0130 21:45:46.589290 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589313 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589554 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e533be2-6d15-4da5-aaba-332e873021a7" containerName="dnsmasq-dns" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589567 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd8f88ac-09d8-4a41-bb6c-e20ce3fd59ca" containerName="init" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.589580 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="a157c5e0-ff7c-4230-9762-55f2abe4df51" containerName="dnsmasq-dns" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.590373 4721 util.go:30] "No sandbox for pod can be found. 
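(The etc-swift mount retries above back off by doubling: durationBeforeRetry is 1s at 21:45:43, 2s at 21:45:44, 4s here at 21:45:46, then 8s and 16s further down. A minimal Go sketch of that doubling pattern follows; the initial delay and cap are assumptions chosen to match what this log shows, while kubelet's real backoff lives in nestedpendingoperations.go and its exponential-backoff helper, whose exact constants are not reproduced here.)

```go
// Minimal sketch of the doubling retry delay visible in the etc-swift
// entries (1s -> 2s -> 4s -> 8s -> 16s). Not kubelet's actual code;
// initialDelay and maxDelay are illustrative assumptions.
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 1 * time.Second
	maxDelay     = 2 * time.Minute // assumed cap, for illustration only
)

// nextDelay doubles the previous delay, starting at initialDelay and
// clamping at maxDelay, mirroring the durationBeforeRetry progression.
func nextDelay(current time.Duration) time.Duration {
	if current == 0 {
		return initialDelay
	}
	if next := current * 2; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 5; attempt++ {
		d = nextDelay(d)
		fmt.Printf("attempt %d: retry after %v\n", attempt, d) // 1s 2s 4s 8s 16s
	}
}
```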
Need to start a new one" pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.593720 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.596318 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.597829 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.599113 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hkfr7"] Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.695402 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/81c15104-7d30-43d8-9e3d-9ab1834959da-etc-swift\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.695519 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-swiftconf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.695560 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-scripts\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.695618 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-dispersionconf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.695845 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-combined-ca-bundle\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.695937 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-ring-data-devices\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.696012 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhpcf\" (UniqueName: \"kubernetes.io/projected/81c15104-7d30-43d8-9e3d-9ab1834959da-kube-api-access-rhpcf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 
21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798064 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-swiftconf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798140 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-scripts\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798214 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-dispersionconf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798323 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-combined-ca-bundle\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798365 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-ring-data-devices\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798405 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhpcf\" (UniqueName: \"kubernetes.io/projected/81c15104-7d30-43d8-9e3d-9ab1834959da-kube-api-access-rhpcf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.798463 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/81c15104-7d30-43d8-9e3d-9ab1834959da-etc-swift\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.799084 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/81c15104-7d30-43d8-9e3d-9ab1834959da-etc-swift\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.799180 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-scripts\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.799738 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-ring-data-devices\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.805538 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-swiftconf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.806568 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-combined-ca-bundle\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.809789 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-dispersionconf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.829004 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhpcf\" (UniqueName: \"kubernetes.io/projected/81c15104-7d30-43d8-9e3d-9ab1834959da-kube-api-access-rhpcf\") pod \"swift-ring-rebalance-hkfr7\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") " pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:46 crc kubenswrapper[4721]: I0130 21:45:46.919732 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hkfr7" Jan 30 21:45:47 crc kubenswrapper[4721]: E0130 21:45:47.819021 4721 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.20:37270->38.102.83.20:38213: write tcp 38.102.83.20:37270->38.102.83.20:38213: write: broken pipe Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.865877 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87b1d1f9-cf16-401a-b55d-a6d2434e0284","Type":"ContainerStarted","Data":"1265b2152b31be46fbca1b54fd9091dda9a45ed9b0e3d5ba2a2ab60762164f33"} Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.868019 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cbab3069-54ee-4146-b912-5e59c0039f86","Type":"ContainerStarted","Data":"8e97b836bebed7cb0c160d1d87e3fa3abc172cfe196ae8b2d0a2eb7d3e787f5c"} Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.868046 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cbab3069-54ee-4146-b912-5e59c0039f86","Type":"ContainerStarted","Data":"83240cb778c238ae6ff0acf49f875f87f65b0363c1f5391c9764d862f1a6f059"} Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.868208 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.878870 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" event={"ID":"7ef41c62-9890-463d-8888-539c1ab07cec","Type":"ContainerStarted","Data":"fd4681814f42c206374d5d62ac927e43b90d6f4dcb6d0ce2fdeefbc5e54c9ebc"} Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.879170 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.882675 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hkfr7"] Jan 30 21:45:47 crc kubenswrapper[4721]: W0130 21:45:47.889093 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81c15104_7d30_43d8_9e3d_9ab1834959da.slice/crio-9f38989cfefc5dc73e7aa42aa18366bbfeb2f7316607533fab5a6e5f3784ef7e WatchSource:0}: Error finding container 9f38989cfefc5dc73e7aa42aa18366bbfeb2f7316607533fab5a6e5f3784ef7e: Status 404 returned error can't find the container with id 9f38989cfefc5dc73e7aa42aa18366bbfeb2f7316607533fab5a6e5f3784ef7e Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.899155 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=4.034913879 podStartE2EDuration="7.899133063s" podCreationTimestamp="2026-01-30 21:45:40 +0000 UTC" firstStartedPulling="2026-01-30 21:45:43.475940139 +0000 UTC m=+1732.267841385" lastFinishedPulling="2026-01-30 21:45:47.340159323 +0000 UTC m=+1736.132060569" observedRunningTime="2026-01-30 21:45:47.892372165 +0000 UTC m=+1736.684273441" watchObservedRunningTime="2026-01-30 21:45:47.899133063 +0000 UTC m=+1736.691034309" Jan 30 21:45:47 crc kubenswrapper[4721]: I0130 21:45:47.921317 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" podStartSLOduration=8.921271299 podStartE2EDuration="8.921271299s" podCreationTimestamp="2026-01-30 21:45:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:45:47.911793736 +0000 UTC m=+1736.703694992" watchObservedRunningTime="2026-01-30 21:45:47.921271299 +0000 UTC m=+1736.713172555" Jan 30 21:45:48 crc kubenswrapper[4721]: I0130 21:45:48.195271 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 30 21:45:48 crc kubenswrapper[4721]: I0130 21:45:48.195342 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 30 21:45:48 crc kubenswrapper[4721]: I0130 21:45:48.892452 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hkfr7" event={"ID":"81c15104-7d30-43d8-9e3d-9ab1834959da","Type":"ContainerStarted","Data":"9f38989cfefc5dc73e7aa42aa18366bbfeb2f7316607533fab5a6e5f3784ef7e"} Jan 30 21:45:50 crc kubenswrapper[4721]: I0130 21:45:50.067687 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 30 21:45:50 crc kubenswrapper[4721]: I0130 21:45:50.071989 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 30 21:45:50 crc kubenswrapper[4721]: I0130 21:45:50.241263 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 30 21:45:50 crc kubenswrapper[4721]: I0130 21:45:50.582848 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0" Jan 30 21:45:50 crc kubenswrapper[4721]: E0130 21:45:50.583109 4721 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 21:45:50 crc kubenswrapper[4721]: E0130 21:45:50.583180 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 21:45:50 crc kubenswrapper[4721]: E0130 21:45:50.583282 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift podName:fbc194de-ea06-4d56-a35a-4b63a46651df nodeName:}" failed. No retries permitted until 2026-01-30 21:45:58.583241842 +0000 UTC m=+1747.375143088 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift") pod "swift-storage-0" (UID: "fbc194de-ea06-4d56-a35a-4b63a46651df") : configmap "swift-ring-files" not found Jan 30 21:45:50 crc kubenswrapper[4721]: I0130 21:45:50.909457 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-s89nx" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.098984 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-csn7z" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.108949 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87b1d1f9-cf16-401a-b55d-a6d2434e0284","Type":"ContainerStarted","Data":"9af2c87cbead09246dcd4df390bad0241a6f351c5fc53617b7c5be3018611a72"} Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.109492 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.113352 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.148963 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=5.639907441 podStartE2EDuration="1m0.148941911s" podCreationTimestamp="2026-01-30 21:44:51 +0000 UTC" firstStartedPulling="2026-01-30 21:44:52.832570868 +0000 UTC m=+1681.624472114" lastFinishedPulling="2026-01-30 21:45:47.341605338 +0000 UTC m=+1736.133506584" observedRunningTime="2026-01-30 21:45:51.147077003 +0000 UTC m=+1739.938978259" watchObservedRunningTime="2026-01-30 21:45:51.148941911 +0000 UTC m=+1739.940843157" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.198593 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.249230 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 30 21:45:51 crc kubenswrapper[4721]: I0130 21:45:51.927009 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.002640 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-2d554"] Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.002928 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="dnsmasq-dns" containerID="cri-o://fd4681814f42c206374d5d62ac927e43b90d6f4dcb6d0ce2fdeefbc5e54c9ebc" gracePeriod=10 Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.011018 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.307731 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.309543 4721 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/cloudkitty-lokistack-ingester-0" podUID="3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.330879 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.413121 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 30 21:45:52 crc kubenswrapper[4721]: I0130 21:45:52.557768 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 30 21:45:53 crc kubenswrapper[4721]: I0130 21:45:53.133844 4721 generic.go:334] "Generic (PLEG): container finished" podID="7ef41c62-9890-463d-8888-539c1ab07cec" containerID="fd4681814f42c206374d5d62ac927e43b90d6f4dcb6d0ce2fdeefbc5e54c9ebc" exitCode=0 Jan 30 21:45:53 crc kubenswrapper[4721]: I0130 21:45:53.133923 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" event={"ID":"7ef41c62-9890-463d-8888-539c1ab07cec","Type":"ContainerDied","Data":"fd4681814f42c206374d5d62ac927e43b90d6f4dcb6d0ce2fdeefbc5e54c9ebc"} Jan 30 21:45:56 crc kubenswrapper[4721]: I0130 21:45:56.092820 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:45:56 crc kubenswrapper[4721]: E0130 21:45:56.095090 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:45:56 crc kubenswrapper[4721]: I0130 21:45:56.911354 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-4fmg9"] Jan 30 21:45:56 crc kubenswrapper[4721]: I0130 21:45:56.912577 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:56 crc kubenswrapper[4721]: I0130 21:45:56.915842 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 30 21:45:56 crc kubenswrapper[4721]: I0130 21:45:56.923378 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4fmg9"] Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.089486 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-operator-scripts\") pod \"root-account-create-update-4fmg9\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") " pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.089604 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6mvv\" (UniqueName: \"kubernetes.io/projected/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-kube-api-access-w6mvv\") pod \"root-account-create-update-4fmg9\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") " pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.191508 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-operator-scripts\") pod \"root-account-create-update-4fmg9\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") " pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.191622 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6mvv\" (UniqueName: \"kubernetes.io/projected/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-kube-api-access-w6mvv\") pod \"root-account-create-update-4fmg9\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") " pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.192401 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-operator-scripts\") pod \"root-account-create-update-4fmg9\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") " pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.211188 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6mvv\" (UniqueName: \"kubernetes.io/projected/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-kube-api-access-w6mvv\") pod \"root-account-create-update-4fmg9\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") " pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:57 crc kubenswrapper[4721]: I0130 21:45:57.243257 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-4fmg9" Jan 30 21:45:58 crc kubenswrapper[4721]: I0130 21:45:58.622508 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0" Jan 30 21:45:58 crc kubenswrapper[4721]: E0130 21:45:58.622748 4721 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 30 21:45:58 crc kubenswrapper[4721]: E0130 21:45:58.624077 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 30 21:45:58 crc kubenswrapper[4721]: E0130 21:45:58.624152 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift podName:fbc194de-ea06-4d56-a35a-4b63a46651df nodeName:}" failed. No retries permitted until 2026-01-30 21:46:14.624130429 +0000 UTC m=+1763.416031685 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift") pod "swift-storage-0" (UID: "fbc194de-ea06-4d56-a35a-4b63a46651df") : configmap "swift-ring-files" not found Jan 30 21:45:59 crc kubenswrapper[4721]: I0130 21:45:59.939079 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.133:5353: i/o timeout" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.113827 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bc0f-account-create-update-8z8k5"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.115427 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.118895 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.129430 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-t6hwz"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.131136 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.148284 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bc0f-account-create-update-8z8k5"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.166507 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-t6hwz"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.205317 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-qgj6c"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.214475 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.223963 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qgj6c"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.237431 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-19d6-account-create-update-crh98"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.238859 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.241696 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.246323 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-19d6-account-create-update-crh98"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.267741 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-operator-scripts\") pod \"keystone-bc0f-account-create-update-8z8k5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.268489 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlvn4\" (UniqueName: \"kubernetes.io/projected/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-kube-api-access-rlvn4\") pod \"keystone-bc0f-account-create-update-8z8k5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.268537 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-operator-scripts\") pod \"keystone-db-create-t6hwz\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.268575 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4m5x\" (UniqueName: \"kubernetes.io/projected/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-kube-api-access-q4m5x\") pod \"keystone-db-create-t6hwz\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.300114 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-49vqb"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.302163 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.314742 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-27c2-account-create-update-mhsz2"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.316004 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.317592 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.322909 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-49vqb"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.335172 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-27c2-account-create-update-mhsz2"] Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xths2\" (UniqueName: \"kubernetes.io/projected/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-kube-api-access-xths2\") pod \"placement-db-create-qgj6c\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") " pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370205 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5vtg\" (UniqueName: \"kubernetes.io/projected/fc85e9a0-f671-4a65-9989-81274e180dd9-kube-api-access-v5vtg\") pod \"glance-27c2-account-create-update-mhsz2\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") " pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370400 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-operator-scripts\") pod \"keystone-bc0f-account-create-update-8z8k5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370430 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c869ca8-e78b-46b1-8223-1cc8b4d50551-operator-scripts\") pod \"glance-db-create-49vqb\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") " pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370452 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5smm\" (UniqueName: \"kubernetes.io/projected/d8ce4272-16c3-4f60-bbde-2149a9ed8138-kube-api-access-w5smm\") pod \"placement-19d6-account-create-update-crh98\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370476 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8ce4272-16c3-4f60-bbde-2149a9ed8138-operator-scripts\") pod \"placement-19d6-account-create-update-crh98\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370501 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwstl\" (UniqueName: \"kubernetes.io/projected/4c869ca8-e78b-46b1-8223-1cc8b4d50551-kube-api-access-fwstl\") pod \"glance-db-create-49vqb\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") " pod="openstack/glance-db-create-49vqb" 
Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370528 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlvn4\" (UniqueName: \"kubernetes.io/projected/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-kube-api-access-rlvn4\") pod \"keystone-bc0f-account-create-update-8z8k5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-operator-scripts\") pod \"placement-db-create-qgj6c\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") " pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370565 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-operator-scripts\") pod \"keystone-db-create-t6hwz\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370596 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4m5x\" (UniqueName: \"kubernetes.io/projected/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-kube-api-access-q4m5x\") pod \"keystone-db-create-t6hwz\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.370618 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc85e9a0-f671-4a65-9989-81274e180dd9-operator-scripts\") pod \"glance-27c2-account-create-update-mhsz2\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") " pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.371366 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-operator-scripts\") pod \"keystone-db-create-t6hwz\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.371476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-operator-scripts\") pod \"keystone-bc0f-account-create-update-8z8k5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.389955 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4m5x\" (UniqueName: \"kubernetes.io/projected/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-kube-api-access-q4m5x\") pod \"keystone-db-create-t6hwz\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.392927 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlvn4\" (UniqueName: \"kubernetes.io/projected/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-kube-api-access-rlvn4\") pod \"keystone-bc0f-account-create-update-8z8k5\" (UID: 
\"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.441876 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.454577 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.472833 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xths2\" (UniqueName: \"kubernetes.io/projected/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-kube-api-access-xths2\") pod \"placement-db-create-qgj6c\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") " pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.472900 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5vtg\" (UniqueName: \"kubernetes.io/projected/fc85e9a0-f671-4a65-9989-81274e180dd9-kube-api-access-v5vtg\") pod \"glance-27c2-account-create-update-mhsz2\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") " pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.472998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c869ca8-e78b-46b1-8223-1cc8b4d50551-operator-scripts\") pod \"glance-db-create-49vqb\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") " pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.473023 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5smm\" (UniqueName: \"kubernetes.io/projected/d8ce4272-16c3-4f60-bbde-2149a9ed8138-kube-api-access-w5smm\") pod \"placement-19d6-account-create-update-crh98\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.473048 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8ce4272-16c3-4f60-bbde-2149a9ed8138-operator-scripts\") pod \"placement-19d6-account-create-update-crh98\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.473075 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwstl\" (UniqueName: \"kubernetes.io/projected/4c869ca8-e78b-46b1-8223-1cc8b4d50551-kube-api-access-fwstl\") pod \"glance-db-create-49vqb\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") " pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.473104 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-operator-scripts\") pod \"placement-db-create-qgj6c\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") " pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.473147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/fc85e9a0-f671-4a65-9989-81274e180dd9-operator-scripts\") pod \"glance-27c2-account-create-update-mhsz2\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") " pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.474093 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc85e9a0-f671-4a65-9989-81274e180dd9-operator-scripts\") pod \"glance-27c2-account-create-update-mhsz2\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") " pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.474119 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8ce4272-16c3-4f60-bbde-2149a9ed8138-operator-scripts\") pod \"placement-19d6-account-create-update-crh98\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.474242 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-operator-scripts\") pod \"placement-db-create-qgj6c\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") " pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.474670 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c869ca8-e78b-46b1-8223-1cc8b4d50551-operator-scripts\") pod \"glance-db-create-49vqb\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") " pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.493573 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwstl\" (UniqueName: \"kubernetes.io/projected/4c869ca8-e78b-46b1-8223-1cc8b4d50551-kube-api-access-fwstl\") pod \"glance-db-create-49vqb\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") " pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.493857 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5vtg\" (UniqueName: \"kubernetes.io/projected/fc85e9a0-f671-4a65-9989-81274e180dd9-kube-api-access-v5vtg\") pod \"glance-27c2-account-create-update-mhsz2\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") " pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.494556 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xths2\" (UniqueName: \"kubernetes.io/projected/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-kube-api-access-xths2\") pod \"placement-db-create-qgj6c\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") " pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.497380 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5smm\" (UniqueName: \"kubernetes.io/projected/d8ce4272-16c3-4f60-bbde-2149a9ed8138-kube-api-access-w5smm\") pod \"placement-19d6-account-create-update-crh98\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.533778 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-qgj6c" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.559902 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.597855 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.633348 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-49vqb" Jan 30 21:46:00 crc kubenswrapper[4721]: I0130 21:46:00.642400 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-27c2-account-create-update-mhsz2" Jan 30 21:46:01 crc kubenswrapper[4721]: I0130 21:46:01.607679 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4k958" podUID="522b5333-a647-446e-a261-b1828a1d20a3" containerName="ovn-controller" probeResult="failure" output=< Jan 30 21:46:01 crc kubenswrapper[4721]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 30 21:46:01 crc kubenswrapper[4721]: > Jan 30 21:46:01 crc kubenswrapper[4721]: I0130 21:46:01.942566 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.105834 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdfz8\" (UniqueName: \"kubernetes.io/projected/7ef41c62-9890-463d-8888-539c1ab07cec-kube-api-access-zdfz8\") pod \"7ef41c62-9890-463d-8888-539c1ab07cec\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.105946 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-nb\") pod \"7ef41c62-9890-463d-8888-539c1ab07cec\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.106060 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb\") pod \"7ef41c62-9890-463d-8888-539c1ab07cec\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.106099 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config\") pod \"7ef41c62-9890-463d-8888-539c1ab07cec\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.106319 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-dns-svc\") pod \"7ef41c62-9890-463d-8888-539c1ab07cec\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.122808 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 
21:46:02.125832 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ef41c62-9890-463d-8888-539c1ab07cec-kube-api-access-zdfz8" (OuterVolumeSpecName: "kube-api-access-zdfz8") pod "7ef41c62-9890-463d-8888-539c1ab07cec" (UID: "7ef41c62-9890-463d-8888-539c1ab07cec"). InnerVolumeSpecName "kube-api-access-zdfz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.186655 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ef41c62-9890-463d-8888-539c1ab07cec" (UID: "7ef41c62-9890-463d-8888-539c1ab07cec"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.188073 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7ef41c62-9890-463d-8888-539c1ab07cec" (UID: "7ef41c62-9890-463d-8888-539c1ab07cec"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.205745 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config" (OuterVolumeSpecName: "config") pod "7ef41c62-9890-463d-8888-539c1ab07cec" (UID: "7ef41c62-9890-463d-8888-539c1ab07cec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.209163 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ef41c62-9890-463d-8888-539c1ab07cec" (UID: "7ef41c62-9890-463d-8888-539c1ab07cec"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.209314 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb\") pod \"7ef41c62-9890-463d-8888-539c1ab07cec\" (UID: \"7ef41c62-9890-463d-8888-539c1ab07cec\") " Jan 30 21:46:02 crc kubenswrapper[4721]: W0130 21:46:02.209463 4721 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/7ef41c62-9890-463d-8888-539c1ab07cec/volumes/kubernetes.io~configmap/ovsdbserver-sb Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.209480 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ef41c62-9890-463d-8888-539c1ab07cec" (UID: "7ef41c62-9890-463d-8888-539c1ab07cec"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.210404 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.210453 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdfz8\" (UniqueName: \"kubernetes.io/projected/7ef41c62-9890-463d-8888-539c1ab07cec-kube-api-access-zdfz8\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.210465 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.210473 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.210484 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ef41c62-9890-463d-8888-539c1ab07cec-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.223980 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hkfr7" event={"ID":"81c15104-7d30-43d8-9e3d-9ab1834959da","Type":"ContainerStarted","Data":"5461d45a91813b0418334b0560d2ac6ad9f20150f050d76613646df165bfe7e3"} Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.225989 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" event={"ID":"7ef41c62-9890-463d-8888-539c1ab07cec","Type":"ContainerDied","Data":"773b0a8cb7429a7fa7eb975d9a86c3abfa4cd0c8b22e385398be71c1675b474c"} Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.226040 4721 scope.go:117] "RemoveContainer" containerID="fd4681814f42c206374d5d62ac927e43b90d6f4dcb6d0ce2fdeefbc5e54c9ebc" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.226147 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.237227 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bc0f-account-create-update-8z8k5"] Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.237942 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerStarted","Data":"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd"} Jan 30 21:46:02 crc kubenswrapper[4721]: W0130 21:46:02.259831 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ec1d7ec_c666_4a81_bf3d_afc8c29fb0c5.slice/crio-c276f109e88d6077c7c516562e1e1a324ff20a431d76a72f910a12f56d466540 WatchSource:0}: Error finding container c276f109e88d6077c7c516562e1e1a324ff20a431d76a72f910a12f56d466540: Status 404 returned error can't find the container with id c276f109e88d6077c7c516562e1e1a324ff20a431d76a72f910a12f56d466540 Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.276616 4721 scope.go:117] "RemoveContainer" containerID="70391a91f3d63e3405389cb80334e59209a71f8dcdf180c9189cf156d77755b2" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.294565 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-hkfr7" podStartSLOduration=2.431824831 podStartE2EDuration="16.294542863s" podCreationTimestamp="2026-01-30 21:45:46 +0000 UTC" firstStartedPulling="2026-01-30 21:45:47.906851983 +0000 UTC m=+1736.698753229" lastFinishedPulling="2026-01-30 21:46:01.769570015 +0000 UTC m=+1750.561471261" observedRunningTime="2026-01-30 21:46:02.243271836 +0000 UTC m=+1751.035173072" watchObservedRunningTime="2026-01-30 21:46:02.294542863 +0000 UTC m=+1751.086444109" Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.311987 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-2d554"] Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.322421 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-2d554"] Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.403270 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-19d6-account-create-update-crh98"] Jan 30 21:46:02 crc kubenswrapper[4721]: W0130 21:46:02.410498 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8ce4272_16c3_4f60_bbde_2149a9ed8138.slice/crio-f029297052c7ede82f75350d14c9a80704d94a7fb9cddc99f93f32cc0d9cf7ef WatchSource:0}: Error finding container f029297052c7ede82f75350d14c9a80704d94a7fb9cddc99f93f32cc0d9cf7ef: Status 404 returned error can't find the container with id f029297052c7ede82f75350d14c9a80704d94a7fb9cddc99f93f32cc0d9cf7ef Jan 30 21:46:02 crc kubenswrapper[4721]: W0130 21:46:02.411075 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1cf23c5_49c5_4eed_93ed_30d6c223c74e.slice/crio-ee89db89caa825c2cf8537e143f9fa5b711e29f8a4145579d9ff2c97bff58cff WatchSource:0}: Error finding container ee89db89caa825c2cf8537e143f9fa5b711e29f8a4145579d9ff2c97bff58cff: Status 404 returned error can't find the container with id ee89db89caa825c2cf8537e143f9fa5b711e29f8a4145579d9ff2c97bff58cff Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 
21:46:02.411704 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-t6hwz"] Jan 30 21:46:02 crc kubenswrapper[4721]: W0130 21:46:02.563671 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7015b0db_a3ba_49d3_a6c7_63e581ef7ac5.slice/crio-4f882be8a2ed8cd8546c22649224ee1a16fe1d789d10c29f646d528414a1cb00 WatchSource:0}: Error finding container 4f882be8a2ed8cd8546c22649224ee1a16fe1d789d10c29f646d528414a1cb00: Status 404 returned error can't find the container with id 4f882be8a2ed8cd8546c22649224ee1a16fe1d789d10c29f646d528414a1cb00 Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.569935 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4fmg9"] Jan 30 21:46:02 crc kubenswrapper[4721]: W0130 21:46:02.578846 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f87a551_3d79_4255_b86e_cf9abbfe4fb8.slice/crio-12a85e72b8e2f947822060984c31c2eca457af21cffde6dcb50e8afdb5740603 WatchSource:0}: Error finding container 12a85e72b8e2f947822060984c31c2eca457af21cffde6dcb50e8afdb5740603: Status 404 returned error can't find the container with id 12a85e72b8e2f947822060984c31c2eca457af21cffde6dcb50e8afdb5740603 Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.580945 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qgj6c"] Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.589723 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-27c2-account-create-update-mhsz2"] Jan 30 21:46:02 crc kubenswrapper[4721]: I0130 21:46:02.596520 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-49vqb"] Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.248551 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qgj6c" event={"ID":"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5","Type":"ContainerStarted","Data":"09c00efc71846dcb98bc3a082d566774c0dd7da4febac4139d733c0872470795"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.248944 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qgj6c" event={"ID":"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5","Type":"ContainerStarted","Data":"4f882be8a2ed8cd8546c22649224ee1a16fe1d789d10c29f646d528414a1cb00"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.250037 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4fmg9" event={"ID":"3f87a551-3d79-4255-b86e-cf9abbfe4fb8","Type":"ContainerStarted","Data":"5699b25289d43cae7b988fcd7bdc8676a38e2797b16d648f600dd6a3dc5d83ee"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.250068 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4fmg9" event={"ID":"3f87a551-3d79-4255-b86e-cf9abbfe4fb8","Type":"ContainerStarted","Data":"12a85e72b8e2f947822060984c31c2eca457af21cffde6dcb50e8afdb5740603"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.251758 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1cf23c5-49c5-4eed-93ed-30d6c223c74e" containerID="c7312924b9986aecfa66ec0edc754acd51c3621de0fbe1866add74c28617418d" exitCode=0 Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.251831 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6hwz" 
event={"ID":"e1cf23c5-49c5-4eed-93ed-30d6c223c74e","Type":"ContainerDied","Data":"c7312924b9986aecfa66ec0edc754acd51c3621de0fbe1866add74c28617418d"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.251968 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6hwz" event={"ID":"e1cf23c5-49c5-4eed-93ed-30d6c223c74e","Type":"ContainerStarted","Data":"ee89db89caa825c2cf8537e143f9fa5b711e29f8a4145579d9ff2c97bff58cff"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.253590 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-27c2-account-create-update-mhsz2" event={"ID":"fc85e9a0-f671-4a65-9989-81274e180dd9","Type":"ContainerStarted","Data":"85ed3b69bef3fd4189566e449edcfd497bea6e3e4c800cd19def67c6421775e1"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.253640 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-27c2-account-create-update-mhsz2" event={"ID":"fc85e9a0-f671-4a65-9989-81274e180dd9","Type":"ContainerStarted","Data":"e3fb9c0c9a767ceb934c86e662636ed2d9de19c3a81f6c0c08cfa0ea1ba8d82a"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.255010 4721 generic.go:334] "Generic (PLEG): container finished" podID="d8ce4272-16c3-4f60-bbde-2149a9ed8138" containerID="740f6cbd3cd49dbe7c039fc5d55dce6094eaaa2afce31cb3c0a644207fc21e7e" exitCode=0 Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.255126 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-19d6-account-create-update-crh98" event={"ID":"d8ce4272-16c3-4f60-bbde-2149a9ed8138","Type":"ContainerDied","Data":"740f6cbd3cd49dbe7c039fc5d55dce6094eaaa2afce31cb3c0a644207fc21e7e"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.255161 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-19d6-account-create-update-crh98" event={"ID":"d8ce4272-16c3-4f60-bbde-2149a9ed8138","Type":"ContainerStarted","Data":"f029297052c7ede82f75350d14c9a80704d94a7fb9cddc99f93f32cc0d9cf7ef"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.257537 4721 generic.go:334] "Generic (PLEG): container finished" podID="7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" containerID="868132c70b29e954b284022bfba9ab97f4467f981546db42e82be0708ac2aa5e" exitCode=0 Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.257604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bc0f-account-create-update-8z8k5" event={"ID":"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5","Type":"ContainerDied","Data":"868132c70b29e954b284022bfba9ab97f4467f981546db42e82be0708ac2aa5e"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.257626 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bc0f-account-create-update-8z8k5" event={"ID":"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5","Type":"ContainerStarted","Data":"c276f109e88d6077c7c516562e1e1a324ff20a431d76a72f910a12f56d466540"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.258959 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-49vqb" event={"ID":"4c869ca8-e78b-46b1-8223-1cc8b4d50551","Type":"ContainerStarted","Data":"9e80b5aed331fd827eda7708338e0930cd7645c42a0cb0a71bdaedbb7213011a"} Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.259004 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-49vqb" event={"ID":"4c869ca8-e78b-46b1-8223-1cc8b4d50551","Type":"ContainerStarted","Data":"b2cae96a8d790d6438dc20dad43c549660d83453f09c2b53f92385eb65ed70f5"} Jan 
30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.300233 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-qgj6c" podStartSLOduration=3.30021345 podStartE2EDuration="3.30021345s" podCreationTimestamp="2026-01-30 21:46:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:03.295774333 +0000 UTC m=+1752.087675579" watchObservedRunningTime="2026-01-30 21:46:03.30021345 +0000 UTC m=+1752.092114696" Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.405037 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-27c2-account-create-update-mhsz2" podStartSLOduration=3.405015924 podStartE2EDuration="3.405015924s" podCreationTimestamp="2026-01-30 21:46:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:03.358648549 +0000 UTC m=+1752.150549795" watchObservedRunningTime="2026-01-30 21:46:03.405015924 +0000 UTC m=+1752.196917170" Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.425575 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-4fmg9" podStartSLOduration=7.425555799 podStartE2EDuration="7.425555799s" podCreationTimestamp="2026-01-30 21:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:03.423726313 +0000 UTC m=+1752.215627559" watchObservedRunningTime="2026-01-30 21:46:03.425555799 +0000 UTC m=+1752.217457045" Jan 30 21:46:03 crc kubenswrapper[4721]: I0130 21:46:03.493732 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-49vqb" podStartSLOduration=3.493711619 podStartE2EDuration="3.493711619s" podCreationTimestamp="2026-01-30 21:46:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:03.492053608 +0000 UTC m=+1752.283954854" watchObservedRunningTime="2026-01-30 21:46:03.493711619 +0000 UTC m=+1752.285612855" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.103759 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" path="/var/lib/kubelet/pods/7ef41c62-9890-463d-8888-539c1ab07cec/volumes" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.271749 4721 generic.go:334] "Generic (PLEG): container finished" podID="fc85e9a0-f671-4a65-9989-81274e180dd9" containerID="85ed3b69bef3fd4189566e449edcfd497bea6e3e4c800cd19def67c6421775e1" exitCode=0 Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.271845 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-27c2-account-create-update-mhsz2" event={"ID":"fc85e9a0-f671-4a65-9989-81274e180dd9","Type":"ContainerDied","Data":"85ed3b69bef3fd4189566e449edcfd497bea6e3e4c800cd19def67c6421775e1"} Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.273652 4721 generic.go:334] "Generic (PLEG): container finished" podID="4c869ca8-e78b-46b1-8223-1cc8b4d50551" containerID="9e80b5aed331fd827eda7708338e0930cd7645c42a0cb0a71bdaedbb7213011a" exitCode=0 Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.273719 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-49vqb" 
event={"ID":"4c869ca8-e78b-46b1-8223-1cc8b4d50551","Type":"ContainerDied","Data":"9e80b5aed331fd827eda7708338e0930cd7645c42a0cb0a71bdaedbb7213011a"} Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.275939 4721 generic.go:334] "Generic (PLEG): container finished" podID="7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" containerID="09c00efc71846dcb98bc3a082d566774c0dd7da4febac4139d733c0872470795" exitCode=0 Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.275999 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qgj6c" event={"ID":"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5","Type":"ContainerDied","Data":"09c00efc71846dcb98bc3a082d566774c0dd7da4febac4139d733c0872470795"} Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.278496 4721 generic.go:334] "Generic (PLEG): container finished" podID="3f87a551-3d79-4255-b86e-cf9abbfe4fb8" containerID="5699b25289d43cae7b988fcd7bdc8676a38e2797b16d648f600dd6a3dc5d83ee" exitCode=0 Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.278659 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4fmg9" event={"ID":"3f87a551-3d79-4255-b86e-cf9abbfe4fb8","Type":"ContainerDied","Data":"5699b25289d43cae7b988fcd7bdc8676a38e2797b16d648f600dd6a3dc5d83ee"} Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.722600 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.877062 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4m5x\" (UniqueName: \"kubernetes.io/projected/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-kube-api-access-q4m5x\") pod \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.877230 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-operator-scripts\") pod \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\" (UID: \"e1cf23c5-49c5-4eed-93ed-30d6c223c74e\") " Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.878172 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e1cf23c5-49c5-4eed-93ed-30d6c223c74e" (UID: "e1cf23c5-49c5-4eed-93ed-30d6c223c74e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.883635 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-kube-api-access-q4m5x" (OuterVolumeSpecName: "kube-api-access-q4m5x") pod "e1cf23c5-49c5-4eed-93ed-30d6c223c74e" (UID: "e1cf23c5-49c5-4eed-93ed-30d6c223c74e"). InnerVolumeSpecName "kube-api-access-q4m5x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.941639 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-2d554" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.133:5353: i/o timeout" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.980327 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4m5x\" (UniqueName: \"kubernetes.io/projected/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-kube-api-access-q4m5x\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.980807 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1cf23c5-49c5-4eed-93ed-30d6c223c74e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:04 crc kubenswrapper[4721]: I0130 21:46:04.997697 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.007685 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.183029 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-operator-scripts\") pod \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.183136 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlvn4\" (UniqueName: \"kubernetes.io/projected/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-kube-api-access-rlvn4\") pod \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\" (UID: \"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5\") " Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.183212 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5smm\" (UniqueName: \"kubernetes.io/projected/d8ce4272-16c3-4f60-bbde-2149a9ed8138-kube-api-access-w5smm\") pod \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.183347 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8ce4272-16c3-4f60-bbde-2149a9ed8138-operator-scripts\") pod \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\" (UID: \"d8ce4272-16c3-4f60-bbde-2149a9ed8138\") " Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.184065 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" (UID: "7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.186036 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8ce4272-16c3-4f60-bbde-2149a9ed8138-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d8ce4272-16c3-4f60-bbde-2149a9ed8138" (UID: "d8ce4272-16c3-4f60-bbde-2149a9ed8138"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.189110 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ce4272-16c3-4f60-bbde-2149a9ed8138-kube-api-access-w5smm" (OuterVolumeSpecName: "kube-api-access-w5smm") pod "d8ce4272-16c3-4f60-bbde-2149a9ed8138" (UID: "d8ce4272-16c3-4f60-bbde-2149a9ed8138"). InnerVolumeSpecName "kube-api-access-w5smm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.189556 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-kube-api-access-rlvn4" (OuterVolumeSpecName: "kube-api-access-rlvn4") pod "7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" (UID: "7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5"). InnerVolumeSpecName "kube-api-access-rlvn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.285513 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlvn4\" (UniqueName: \"kubernetes.io/projected/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-kube-api-access-rlvn4\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.285555 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5smm\" (UniqueName: \"kubernetes.io/projected/d8ce4272-16c3-4f60-bbde-2149a9ed8138-kube-api-access-w5smm\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.285568 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8ce4272-16c3-4f60-bbde-2149a9ed8138-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.285582 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.289852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerStarted","Data":"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99"} Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.292736 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t6hwz" event={"ID":"e1cf23c5-49c5-4eed-93ed-30d6c223c74e","Type":"ContainerDied","Data":"ee89db89caa825c2cf8537e143f9fa5b711e29f8a4145579d9ff2c97bff58cff"} Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.292759 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee89db89caa825c2cf8537e143f9fa5b711e29f8a4145579d9ff2c97bff58cff" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.292881 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-t6hwz" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.294757 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-19d6-account-create-update-crh98" event={"ID":"d8ce4272-16c3-4f60-bbde-2149a9ed8138","Type":"ContainerDied","Data":"f029297052c7ede82f75350d14c9a80704d94a7fb9cddc99f93f32cc0d9cf7ef"} Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.294898 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f029297052c7ede82f75350d14c9a80704d94a7fb9cddc99f93f32cc0d9cf7ef" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.294992 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-19d6-account-create-update-crh98" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.308063 4721 generic.go:334] "Generic (PLEG): container finished" podID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerID="7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec" exitCode=0 Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.308150 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d483e4e0-6513-44ce-b601-359b9c2262ca","Type":"ContainerDied","Data":"7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec"} Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.310655 4721 generic.go:334] "Generic (PLEG): container finished" podID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerID="c0b0b5623f5f35b50de663a7d761c3d99abef05cd90b85da9b628b2b5a7c2233" exitCode=0 Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.310685 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1f120802-4119-4ed8-bf74-62b1e4a534bc","Type":"ContainerDied","Data":"c0b0b5623f5f35b50de663a7d761c3d99abef05cd90b85da9b628b2b5a7c2233"} Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.313801 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bc0f-account-create-update-8z8k5" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.314486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bc0f-account-create-update-8z8k5" event={"ID":"7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5","Type":"ContainerDied","Data":"c276f109e88d6077c7c516562e1e1a324ff20a431d76a72f910a12f56d466540"} Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.314515 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c276f109e88d6077c7c516562e1e1a324ff20a431d76a72f910a12f56d466540" Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.830751 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-49vqb"
Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.898587 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwstl\" (UniqueName: \"kubernetes.io/projected/4c869ca8-e78b-46b1-8223-1cc8b4d50551-kube-api-access-fwstl\") pod \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") "
Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.898792 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c869ca8-e78b-46b1-8223-1cc8b4d50551-operator-scripts\") pod \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\" (UID: \"4c869ca8-e78b-46b1-8223-1cc8b4d50551\") "
Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.900142 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c869ca8-e78b-46b1-8223-1cc8b4d50551-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4c869ca8-e78b-46b1-8223-1cc8b4d50551" (UID: "4c869ca8-e78b-46b1-8223-1cc8b4d50551"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:05 crc kubenswrapper[4721]: I0130 21:46:05.914553 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c869ca8-e78b-46b1-8223-1cc8b4d50551-kube-api-access-fwstl" (OuterVolumeSpecName: "kube-api-access-fwstl") pod "4c869ca8-e78b-46b1-8223-1cc8b4d50551" (UID: "4c869ca8-e78b-46b1-8223-1cc8b4d50551"). InnerVolumeSpecName "kube-api-access-fwstl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.001811 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwstl\" (UniqueName: \"kubernetes.io/projected/4c869ca8-e78b-46b1-8223-1cc8b4d50551-kube-api-access-fwstl\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.001851 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c869ca8-e78b-46b1-8223-1cc8b4d50551-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.022417 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4fmg9"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.032220 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-27c2-account-create-update-mhsz2"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.039581 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qgj6c"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.113198 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-operator-scripts\") pod \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") "
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.113276 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-operator-scripts\") pod \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") "
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.113422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc85e9a0-f671-4a65-9989-81274e180dd9-operator-scripts\") pod \"fc85e9a0-f671-4a65-9989-81274e180dd9\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") "
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.113496 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5vtg\" (UniqueName: \"kubernetes.io/projected/fc85e9a0-f671-4a65-9989-81274e180dd9-kube-api-access-v5vtg\") pod \"fc85e9a0-f671-4a65-9989-81274e180dd9\" (UID: \"fc85e9a0-f671-4a65-9989-81274e180dd9\") "
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.113535 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6mvv\" (UniqueName: \"kubernetes.io/projected/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-kube-api-access-w6mvv\") pod \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\" (UID: \"3f87a551-3d79-4255-b86e-cf9abbfe4fb8\") "
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.113598 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xths2\" (UniqueName: \"kubernetes.io/projected/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-kube-api-access-xths2\") pod \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\" (UID: \"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5\") "
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.114677 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc85e9a0-f671-4a65-9989-81274e180dd9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fc85e9a0-f671-4a65-9989-81274e180dd9" (UID: "fc85e9a0-f671-4a65-9989-81274e180dd9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.115115 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" (UID: "7015b0db-a3ba-49d3-a6c7-63e581ef7ac5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.115676 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3f87a551-3d79-4255-b86e-cf9abbfe4fb8" (UID: "3f87a551-3d79-4255-b86e-cf9abbfe4fb8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.119318 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc85e9a0-f671-4a65-9989-81274e180dd9-kube-api-access-v5vtg" (OuterVolumeSpecName: "kube-api-access-v5vtg") pod "fc85e9a0-f671-4a65-9989-81274e180dd9" (UID: "fc85e9a0-f671-4a65-9989-81274e180dd9"). InnerVolumeSpecName "kube-api-access-v5vtg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.119410 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-kube-api-access-xths2" (OuterVolumeSpecName: "kube-api-access-xths2") pod "7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" (UID: "7015b0db-a3ba-49d3-a6c7-63e581ef7ac5"). InnerVolumeSpecName "kube-api-access-xths2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.123169 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-kube-api-access-w6mvv" (OuterVolumeSpecName: "kube-api-access-w6mvv") pod "3f87a551-3d79-4255-b86e-cf9abbfe4fb8" (UID: "3f87a551-3d79-4255-b86e-cf9abbfe4fb8"). InnerVolumeSpecName "kube-api-access-w6mvv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.215917 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc85e9a0-f671-4a65-9989-81274e180dd9-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.215962 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5vtg\" (UniqueName: \"kubernetes.io/projected/fc85e9a0-f671-4a65-9989-81274e180dd9-kube-api-access-v5vtg\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.215972 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6mvv\" (UniqueName: \"kubernetes.io/projected/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-kube-api-access-w6mvv\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.215981 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xths2\" (UniqueName: \"kubernetes.io/projected/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-kube-api-access-xths2\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.215995 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.216007 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f87a551-3d79-4255-b86e-cf9abbfe4fb8-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.342544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4fmg9" event={"ID":"3f87a551-3d79-4255-b86e-cf9abbfe4fb8","Type":"ContainerDied","Data":"12a85e72b8e2f947822060984c31c2eca457af21cffde6dcb50e8afdb5740603"}
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.342883 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12a85e72b8e2f947822060984c31c2eca457af21cffde6dcb50e8afdb5740603"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.342960 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4fmg9"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.353790 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-27c2-account-create-update-mhsz2"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.353794 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-27c2-account-create-update-mhsz2" event={"ID":"fc85e9a0-f671-4a65-9989-81274e180dd9","Type":"ContainerDied","Data":"e3fb9c0c9a767ceb934c86e662636ed2d9de19c3a81f6c0c08cfa0ea1ba8d82a"}
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.353920 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3fb9c0c9a767ceb934c86e662636ed2d9de19c3a81f6c0c08cfa0ea1ba8d82a"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.355678 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d483e4e0-6513-44ce-b601-359b9c2262ca","Type":"ContainerStarted","Data":"13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d"}
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.357216 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.368622 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1f120802-4119-4ed8-bf74-62b1e4a534bc","Type":"ContainerStarted","Data":"bf9936ef84188ef8278c633d64dab570e01d424f399826f7adde77d19219657f"}
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.369030 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.376951 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-49vqb" event={"ID":"4c869ca8-e78b-46b1-8223-1cc8b4d50551","Type":"ContainerDied","Data":"b2cae96a8d790d6438dc20dad43c549660d83453f09c2b53f92385eb65ed70f5"}
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.376994 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2cae96a8d790d6438dc20dad43c549660d83453f09c2b53f92385eb65ed70f5"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.377055 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-49vqb"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.392210 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qgj6c" event={"ID":"7015b0db-a3ba-49d3-a6c7-63e581ef7ac5","Type":"ContainerDied","Data":"4f882be8a2ed8cd8546c22649224ee1a16fe1d789d10c29f646d528414a1cb00"}
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.392252 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f882be8a2ed8cd8546c22649224ee1a16fe1d789d10c29f646d528414a1cb00"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.392328 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qgj6c"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.415539 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.51575068 podStartE2EDuration="1m21.415519723s" podCreationTimestamp="2026-01-30 21:44:45 +0000 UTC" firstStartedPulling="2026-01-30 21:44:47.214641805 +0000 UTC m=+1676.006543051" lastFinishedPulling="2026-01-30 21:45:30.114410848 +0000 UTC m=+1718.906312094" observedRunningTime="2026-01-30 21:46:06.412612663 +0000 UTC m=+1755.204513909" watchObservedRunningTime="2026-01-30 21:46:06.415519723 +0000 UTC m=+1755.207420969"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.463177 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.998011037 podStartE2EDuration="1m21.463135007s" podCreationTimestamp="2026-01-30 21:44:45 +0000 UTC" firstStartedPulling="2026-01-30 21:44:47.545568428 +0000 UTC m=+1676.337469674" lastFinishedPulling="2026-01-30 21:45:30.010692408 +0000 UTC m=+1718.802593644" observedRunningTime="2026-01-30 21:46:06.455283424 +0000 UTC m=+1755.247184660" watchObservedRunningTime="2026-01-30 21:46:06.463135007 +0000 UTC m=+1755.255036253"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.651683 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-8mqsj"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.656805 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-8mqsj"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.679620 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4k958" podUID="522b5333-a647-446e-a261-b1828a1d20a3" containerName="ovn-controller" probeResult="failure" output=<
Jan 30 21:46:06 crc kubenswrapper[4721]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Jan 30 21:46:06 crc kubenswrapper[4721]: >
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909337 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4k958-config-fkflm"]
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909706 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="init"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909723 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="init"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909732 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc85e9a0-f671-4a65-9989-81274e180dd9" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909739 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc85e9a0-f671-4a65-9989-81274e180dd9" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909749 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f87a551-3d79-4255-b86e-cf9abbfe4fb8" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909756 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f87a551-3d79-4255-b86e-cf9abbfe4fb8" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909764 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c869ca8-e78b-46b1-8223-1cc8b4d50551" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909770 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c869ca8-e78b-46b1-8223-1cc8b4d50551" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909786 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909792 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909809 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909817 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909829 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="dnsmasq-dns"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909836 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="dnsmasq-dns"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909843 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1cf23c5-49c5-4eed-93ed-30d6c223c74e" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909848 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1cf23c5-49c5-4eed-93ed-30d6c223c74e" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: E0130 21:46:06.909857 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ce4272-16c3-4f60-bbde-2149a9ed8138" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.909862 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ce4272-16c3-4f60-bbde-2149a9ed8138" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910025 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ef41c62-9890-463d-8888-539c1ab07cec" containerName="dnsmasq-dns"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910058 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910073 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c869ca8-e78b-46b1-8223-1cc8b4d50551" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910090 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ce4272-16c3-4f60-bbde-2149a9ed8138" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910104 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1cf23c5-49c5-4eed-93ed-30d6c223c74e" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910116 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f87a551-3d79-4255-b86e-cf9abbfe4fb8" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910131 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" containerName="mariadb-database-create"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910142 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc85e9a0-f671-4a65-9989-81274e180dd9" containerName="mariadb-account-create-update"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.910847 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.916741 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Jan 30 21:46:06 crc kubenswrapper[4721]: I0130 21:46:06.980491 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4k958-config-fkflm"]
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.029868 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh5xh\" (UniqueName: \"kubernetes.io/projected/03a2dcee-f733-4fce-a3c4-fbba29e97848-kube-api-access-gh5xh\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.030033 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.030110 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-additional-scripts\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.030163 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run-ovn\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.030210 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-scripts\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.030348 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-log-ovn\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.092234 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"
Jan 30 21:46:07 crc kubenswrapper[4721]: E0130 21:46:07.092524 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.131654 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-scripts\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.131734 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-log-ovn\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.131925 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh5xh\" (UniqueName: \"kubernetes.io/projected/03a2dcee-f733-4fce-a3c4-fbba29e97848-kube-api-access-gh5xh\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.131985 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.132035 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-additional-scripts\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.132112 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run-ovn\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.132168 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-log-ovn\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.132219 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run-ovn\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.132273 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.133474 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-additional-scripts\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.134244 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-scripts\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.158688 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh5xh\" (UniqueName: \"kubernetes.io/projected/03a2dcee-f733-4fce-a3c4-fbba29e97848-kube-api-access-gh5xh\") pod \"ovn-controller-4k958-config-fkflm\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") " pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.230943 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:07 crc kubenswrapper[4721]: I0130 21:46:07.856666 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4k958-config-fkflm"]
Jan 30 21:46:08 crc kubenswrapper[4721]: I0130 21:46:08.066889 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-4fmg9"]
Jan 30 21:46:08 crc kubenswrapper[4721]: I0130 21:46:08.075339 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-4fmg9"]
Jan 30 21:46:08 crc kubenswrapper[4721]: I0130 21:46:08.103611 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f87a551-3d79-4255-b86e-cf9abbfe4fb8" path="/var/lib/kubelet/pods/3f87a551-3d79-4255-b86e-cf9abbfe4fb8/volumes"
Jan 30 21:46:09 crc kubenswrapper[4721]: W0130 21:46:09.310814 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03a2dcee_f733_4fce_a3c4_fbba29e97848.slice/crio-5d4d258bef8cd42d8526df29015abf1d0ed7b9fad9fc0c9e135f72f79295422b WatchSource:0}: Error finding container 5d4d258bef8cd42d8526df29015abf1d0ed7b9fad9fc0c9e135f72f79295422b: Status 404 returned error can't find the container with id 5d4d258bef8cd42d8526df29015abf1d0ed7b9fad9fc0c9e135f72f79295422b
Jan 30 21:46:09 crc kubenswrapper[4721]: I0130 21:46:09.431417 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4k958-config-fkflm" event={"ID":"03a2dcee-f733-4fce-a3c4-fbba29e97848","Type":"ContainerStarted","Data":"5d4d258bef8cd42d8526df29015abf1d0ed7b9fad9fc0c9e135f72f79295422b"}
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.443836 4721 generic.go:334] "Generic (PLEG): container finished" podID="81c15104-7d30-43d8-9e3d-9ab1834959da" containerID="5461d45a91813b0418334b0560d2ac6ad9f20150f050d76613646df165bfe7e3" exitCode=0
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.443917 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hkfr7" event={"ID":"81c15104-7d30-43d8-9e3d-9ab1834959da","Type":"ContainerDied","Data":"5461d45a91813b0418334b0560d2ac6ad9f20150f050d76613646df165bfe7e3"}
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.446391 4721 generic.go:334] "Generic (PLEG): container finished" podID="03a2dcee-f733-4fce-a3c4-fbba29e97848" containerID="6c38b9a2de24bf31b2faa93142af1bb5824a774cb81865b546dc364e9583174a" exitCode=0
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.446752 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4k958-config-fkflm" event={"ID":"03a2dcee-f733-4fce-a3c4-fbba29e97848","Type":"ContainerDied","Data":"6c38b9a2de24bf31b2faa93142af1bb5824a774cb81865b546dc364e9583174a"}
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.541048 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-l2tjn"]
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.542406 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.545696 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.545767 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9k75v"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.551595 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-l2tjn"]
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.611866 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg22j\" (UniqueName: \"kubernetes.io/projected/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-kube-api-access-kg22j\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.611928 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-combined-ca-bundle\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.611983 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-db-sync-config-data\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.612016 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-config-data\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.713886 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg22j\" (UniqueName: \"kubernetes.io/projected/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-kube-api-access-kg22j\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.713960 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-combined-ca-bundle\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.715163 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-db-sync-config-data\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.715210 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-config-data\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.720625 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-db-sync-config-data\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.720633 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-config-data\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.726498 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-combined-ca-bundle\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.749915 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg22j\" (UniqueName: \"kubernetes.io/projected/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-kube-api-access-kg22j\") pod \"glance-db-sync-l2tjn\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:10 crc kubenswrapper[4721]: I0130 21:46:10.924623 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-l2tjn"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.458225 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerStarted","Data":"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80"}
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.490991 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=8.970769967 podStartE2EDuration="1m20.490971558s" podCreationTimestamp="2026-01-30 21:44:51 +0000 UTC" firstStartedPulling="2026-01-30 21:44:58.706400442 +0000 UTC m=+1687.498301688" lastFinishedPulling="2026-01-30 21:46:10.226602033 +0000 UTC m=+1759.018503279" observedRunningTime="2026-01-30 21:46:11.487992215 +0000 UTC m=+1760.279893461" watchObservedRunningTime="2026-01-30 21:46:11.490971558 +0000 UTC m=+1760.282872804"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.554230 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-l2tjn"]
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.625682 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4k958"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.942084 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hkfr7"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.955211 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-2vp8j"]
Jan 30 21:46:11 crc kubenswrapper[4721]: E0130 21:46:11.955585 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c15104-7d30-43d8-9e3d-9ab1834959da" containerName="swift-ring-rebalance"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.955601 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c15104-7d30-43d8-9e3d-9ab1834959da" containerName="swift-ring-rebalance"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.955789 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c15104-7d30-43d8-9e3d-9ab1834959da" containerName="swift-ring-rebalance"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.956418 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.957979 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.966805 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:11 crc kubenswrapper[4721]: I0130 21:46:11.983932 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-2vp8j"]
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043635 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run\") pod \"03a2dcee-f733-4fce-a3c4-fbba29e97848\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043703 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-dispersionconf\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043724 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-additional-scripts\") pod \"03a2dcee-f733-4fce-a3c4-fbba29e97848\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043742 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-scripts\") pod \"03a2dcee-f733-4fce-a3c4-fbba29e97848\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043768 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-combined-ca-bundle\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043753 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run" (OuterVolumeSpecName: "var-run") pod "03a2dcee-f733-4fce-a3c4-fbba29e97848" (UID: "03a2dcee-f733-4fce-a3c4-fbba29e97848"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043821 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-scripts\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043844 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-ring-data-devices\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043952 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh5xh\" (UniqueName: \"kubernetes.io/projected/03a2dcee-f733-4fce-a3c4-fbba29e97848-kube-api-access-gh5xh\") pod \"03a2dcee-f733-4fce-a3c4-fbba29e97848\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.043994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-log-ovn\") pod \"03a2dcee-f733-4fce-a3c4-fbba29e97848\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044081 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhpcf\" (UniqueName: \"kubernetes.io/projected/81c15104-7d30-43d8-9e3d-9ab1834959da-kube-api-access-rhpcf\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044128 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/81c15104-7d30-43d8-9e3d-9ab1834959da-etc-swift\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-swiftconf\") pod \"81c15104-7d30-43d8-9e3d-9ab1834959da\" (UID: \"81c15104-7d30-43d8-9e3d-9ab1834959da\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run-ovn\") pod \"03a2dcee-f733-4fce-a3c4-fbba29e97848\" (UID: \"03a2dcee-f733-4fce-a3c4-fbba29e97848\") "
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044346 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "03a2dcee-f733-4fce-a3c4-fbba29e97848" (UID: "03a2dcee-f733-4fce-a3c4-fbba29e97848"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "03a2dcee-f733-4fce-a3c4-fbba29e97848" (UID: "03a2dcee-f733-4fce-a3c4-fbba29e97848"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044708 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12f9a600-5c9f-4334-8ee4-c4f2936594e3-operator-scripts\") pod \"root-account-create-update-2vp8j\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") " pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044759 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8n5x\" (UniqueName: \"kubernetes.io/projected/12f9a600-5c9f-4334-8ee4-c4f2936594e3-kube-api-access-g8n5x\") pod \"root-account-create-update-2vp8j\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") " pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044805 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-scripts" (OuterVolumeSpecName: "scripts") pod "03a2dcee-f733-4fce-a3c4-fbba29e97848" (UID: "03a2dcee-f733-4fce-a3c4-fbba29e97848"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044882 4721 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-log-ovn\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044906 4721 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.044918 4721 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-additional-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.045082 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.045160 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c15104-7d30-43d8-9e3d-9ab1834959da-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.045246 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "03a2dcee-f733-4fce-a3c4-fbba29e97848" (UID: "03a2dcee-f733-4fce-a3c4-fbba29e97848"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.049391 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03a2dcee-f733-4fce-a3c4-fbba29e97848-kube-api-access-gh5xh" (OuterVolumeSpecName: "kube-api-access-gh5xh") pod "03a2dcee-f733-4fce-a3c4-fbba29e97848" (UID: "03a2dcee-f733-4fce-a3c4-fbba29e97848"). InnerVolumeSpecName "kube-api-access-gh5xh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.051323 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c15104-7d30-43d8-9e3d-9ab1834959da-kube-api-access-rhpcf" (OuterVolumeSpecName: "kube-api-access-rhpcf") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "kube-api-access-rhpcf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.052799 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.071364 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-scripts" (OuterVolumeSpecName: "scripts") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.075511 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.082503 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81c15104-7d30-43d8-9e3d-9ab1834959da" (UID: "81c15104-7d30-43d8-9e3d-9ab1834959da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.126245 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.146276 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12f9a600-5c9f-4334-8ee4-c4f2936594e3-operator-scripts\") pod \"root-account-create-update-2vp8j\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") " pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.146695 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8n5x\" (UniqueName: \"kubernetes.io/projected/12f9a600-5c9f-4334-8ee4-c4f2936594e3-kube-api-access-g8n5x\") pod \"root-account-create-update-2vp8j\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") " pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147103 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/81c15104-7d30-43d8-9e3d-9ab1834959da-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147127 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147139 4721 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/03a2dcee-f733-4fce-a3c4-fbba29e97848-var-run-ovn\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147151 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147164 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03a2dcee-f733-4fce-a3c4-fbba29e97848-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147176 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c15104-7d30-43d8-9e3d-9ab1834959da-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147188 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147198 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/81c15104-7d30-43d8-9e3d-9ab1834959da-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147210 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh5xh\" (UniqueName: \"kubernetes.io/projected/03a2dcee-f733-4fce-a3c4-fbba29e97848-kube-api-access-gh5xh\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147223 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhpcf\" (UniqueName: \"kubernetes.io/projected/81c15104-7d30-43d8-9e3d-9ab1834959da-kube-api-access-rhpcf\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.147747 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12f9a600-5c9f-4334-8ee4-c4f2936594e3-operator-scripts\") pod \"root-account-create-update-2vp8j\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") " pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.167920 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8n5x\" (UniqueName: \"kubernetes.io/projected/12f9a600-5c9f-4334-8ee4-c4f2936594e3-kube-api-access-g8n5x\") pod \"root-account-create-update-2vp8j\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") " pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.286886 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.482026 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hkfr7" event={"ID":"81c15104-7d30-43d8-9e3d-9ab1834959da","Type":"ContainerDied","Data":"9f38989cfefc5dc73e7aa42aa18366bbfeb2f7316607533fab5a6e5f3784ef7e"}
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.482365 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f38989cfefc5dc73e7aa42aa18366bbfeb2f7316607533fab5a6e5f3784ef7e"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.482437 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hkfr7"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.490344 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4k958-config-fkflm" event={"ID":"03a2dcee-f733-4fce-a3c4-fbba29e97848","Type":"ContainerDied","Data":"5d4d258bef8cd42d8526df29015abf1d0ed7b9fad9fc0c9e135f72f79295422b"}
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.490396 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d4d258bef8cd42d8526df29015abf1d0ed7b9fad9fc0c9e135f72f79295422b"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.490467 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4k958-config-fkflm"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.499456 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l2tjn" event={"ID":"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c","Type":"ContainerStarted","Data":"f9b4d9ec8dcc6803f5aa3c5abbe7431aa96dc5fa8f3fe3bb8d633ac04cb0ea1b"}
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.762161 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Jan 30 21:46:12 crc kubenswrapper[4721]: I0130 21:46:12.867215 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-2vp8j"]
Jan 30 21:46:13 crc kubenswrapper[4721]: I0130 21:46:13.110627 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4k958-config-fkflm"]
Jan 30 21:46:13 crc kubenswrapper[4721]: I0130 21:46:13.118523 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4k958-config-fkflm"]
Jan 30 21:46:13 crc kubenswrapper[4721]: I0130 21:46:13.529145 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2vp8j" event={"ID":"12f9a600-5c9f-4334-8ee4-c4f2936594e3","Type":"ContainerStarted","Data":"9f6327d9740376f62d8ae92558a1df0c1debdc419b728c750b772a74a10efd5c"}
Jan 30 21:46:13 crc kubenswrapper[4721]: I0130 21:46:13.529510 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2vp8j" event={"ID":"12f9a600-5c9f-4334-8ee4-c4f2936594e3","Type":"ContainerStarted","Data":"27fd44896710eb77d28eb01c2e3408ab9720b7fe6491de559ec8355e10dfb4ec"}
Jan 30 21:46:14 crc kubenswrapper[4721]: I0130 21:46:14.102363 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03a2dcee-f733-4fce-a3c4-fbba29e97848" path="/var/lib/kubelet/pods/03a2dcee-f733-4fce-a3c4-fbba29e97848/volumes"
Jan 30 21:46:14 crc kubenswrapper[4721]: I0130 21:46:14.560640 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-2vp8j" podStartSLOduration=3.560621298 podStartE2EDuration="3.560621298s" podCreationTimestamp="2026-01-30 21:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:14.554290811 +0000 UTC m=+1763.346192077" watchObservedRunningTime="2026-01-30 21:46:14.560621298 +0000 UTC m=+1763.352522534"
Jan 30 21:46:14 crc kubenswrapper[4721]: I0130 21:46:14.700842 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:46:14 crc kubenswrapper[4721]: I0130 21:46:14.725537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fbc194de-ea06-4d56-a35a-4b63a46651df-etc-swift\") pod \"swift-storage-0\" (UID: \"fbc194de-ea06-4d56-a35a-4b63a46651df\") " pod="openstack/swift-storage-0"
Jan 30 21:46:14 crc kubenswrapper[4721]: I0130 21:46:14.976518 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:16.644957 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.110:5671: connect: connection refused"
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:16.954757 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.111:5671: connect: connection refused"
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:17.582043 4721 generic.go:334] "Generic (PLEG): container finished" podID="12f9a600-5c9f-4334-8ee4-c4f2936594e3" containerID="9f6327d9740376f62d8ae92558a1df0c1debdc419b728c750b772a74a10efd5c" exitCode=0
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:17.582271 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2vp8j" event={"ID":"12f9a600-5c9f-4334-8ee4-c4f2936594e3","Type":"ContainerDied","Data":"9f6327d9740376f62d8ae92558a1df0c1debdc419b728c750b772a74a10efd5c"}
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:18.092237 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"
Jan 30 21:46:22 crc kubenswrapper[4721]: E0130 21:46:18.092754 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:22.119549 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0"
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:22.532545 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:22.762520 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Jan 30 21:46:22 crc kubenswrapper[4721]: I0130 21:46:22.764717 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Jan 30 21:46:23 crc kubenswrapper[4721]: I0130 21:46:23.644989 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Jan 30 21:46:26 crc kubenswrapper[4721]: I0130 21:46:26.056318 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Jan 30 21:46:26 crc kubenswrapper[4721]: I0130 21:46:26.057077 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="prometheus" containerID="cri-o://f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" gracePeriod=600
Jan 30 21:46:26 crc kubenswrapper[4721]: I0130 21:46:26.057239 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="config-reloader" containerID="cri-o://17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" gracePeriod=600
Jan 30 21:46:26 crc kubenswrapper[4721]: I0130 21:46:26.057249 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="thanos-sidecar" containerID="cri-o://ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" gracePeriod=600
Jan 30 21:46:26 crc kubenswrapper[4721]: I0130 21:46:26.642809 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.110:5671: connect: connection refused"
Jan 30 21:46:26 crc kubenswrapper[4721]: I0130 21:46:26.951856 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.111:5671: connect: connection refused"
Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.197919 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-2vp8j"
Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.267411 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified"
Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.267582 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kg22j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-l2tjn_openstack(7c432809-1bbc-46aa-b2bb-4cc7fd182b5c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.268809 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-l2tjn" podUID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c"
Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.315080 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8n5x\" (UniqueName: \"kubernetes.io/projected/12f9a600-5c9f-4334-8ee4-c4f2936594e3-kube-api-access-g8n5x\") pod \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") "
Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.315155 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12f9a600-5c9f-4334-8ee4-c4f2936594e3-operator-scripts\") pod \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\" (UID: \"12f9a600-5c9f-4334-8ee4-c4f2936594e3\") "
Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.316881 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12f9a600-5c9f-4334-8ee4-c4f2936594e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "12f9a600-5c9f-4334-8ee4-c4f2936594e3" (UID: "12f9a600-5c9f-4334-8ee4-c4f2936594e3"). InnerVolumeSpecName "operator-scripts".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.328660 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12f9a600-5c9f-4334-8ee4-c4f2936594e3-kube-api-access-g8n5x" (OuterVolumeSpecName: "kube-api-access-g8n5x") pod "12f9a600-5c9f-4334-8ee4-c4f2936594e3" (UID: "12f9a600-5c9f-4334-8ee4-c4f2936594e3"). InnerVolumeSpecName "kube-api-access-g8n5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.418416 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8n5x\" (UniqueName: \"kubernetes.io/projected/12f9a600-5c9f-4334-8ee4-c4f2936594e3-kube-api-access-g8n5x\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.418462 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12f9a600-5c9f-4334-8ee4-c4f2936594e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.585529 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.698267 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2vp8j" event={"ID":"12f9a600-5c9f-4334-8ee4-c4f2936594e3","Type":"ContainerDied","Data":"27fd44896710eb77d28eb01c2e3408ab9720b7fe6491de559ec8355e10dfb4ec"} Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.698319 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27fd44896710eb77d28eb01c2e3408ab9720b7fe6491de559ec8355e10dfb4ec" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.698391 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-2vp8j" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700781 4721 generic.go:334] "Generic (PLEG): container finished" podID="8306e740-fd1d-459e-a0db-fc01a639f991" containerID="ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" exitCode=0 Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700803 4721 generic.go:334] "Generic (PLEG): container finished" podID="8306e740-fd1d-459e-a0db-fc01a639f991" containerID="17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" exitCode=0 Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700811 4721 generic.go:334] "Generic (PLEG): container finished" podID="8306e740-fd1d-459e-a0db-fc01a639f991" containerID="f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" exitCode=0 Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700838 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700843 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerDied","Data":"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80"} Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700859 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerDied","Data":"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99"} Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700868 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerDied","Data":"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd"} Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700877 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"8306e740-fd1d-459e-a0db-fc01a639f991","Type":"ContainerDied","Data":"9e1243b2d40ae2912216adc9ab6f871783d1215c9f663a8684f07fe077844029"} Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.700891 4721 scope.go:117] "RemoveContainer" containerID="ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.702268 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"26d3721fb3f15a5f8b5f4e8050074c4062384076773675e1f8543db1162a82b5"} Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.703449 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-l2tjn" podUID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.724177 4721 scope.go:117] "RemoveContainer" containerID="17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.724232 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-0\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.724347 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8306e740-fd1d-459e-a0db-fc01a639f991-config-out\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.724545 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 
21:46:27.724581 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-2\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.724636 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-tls-assets\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.724672 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-web-config\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.725379 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.725596 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.725704 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-config\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.725780 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-thanos-prometheus-http-client-file\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.725835 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-1\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.725896 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s89cq\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-kube-api-access-s89cq\") pod \"8306e740-fd1d-459e-a0db-fc01a639f991\" (UID: \"8306e740-fd1d-459e-a0db-fc01a639f991\") " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.728001 4721 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.728033 4721 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.728451 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.740360 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.740359 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8306e740-fd1d-459e-a0db-fc01a639f991-config-out" (OuterVolumeSpecName: "config-out") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.751019 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.755937 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.756466 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-config" (OuterVolumeSpecName: "config") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.756612 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-kube-api-access-s89cq" (OuterVolumeSpecName: "kube-api-access-s89cq") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "kube-api-access-s89cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.770076 4721 scope.go:117] "RemoveContainer" containerID="f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.771796 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-web-config" (OuterVolumeSpecName: "web-config") pod "8306e740-fd1d-459e-a0db-fc01a639f991" (UID: "8306e740-fd1d-459e-a0db-fc01a639f991"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.789760 4721 scope.go:117] "RemoveContainer" containerID="d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.811939 4721 scope.go:117] "RemoveContainer" containerID="ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.812968 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": container with ID starting with ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80 not found: ID does not exist" containerID="ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.813004 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80"} err="failed to get container status \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": rpc error: code = NotFound desc = could not find container \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": container with ID starting with ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80 not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.813032 4721 scope.go:117] "RemoveContainer" containerID="17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.813470 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": container with ID starting with 17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99 not found: ID does not exist" containerID="17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.813517 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99"} err="failed to get container status \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": rpc error: code = NotFound desc = could not find container \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": container with ID starting with 17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99 not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.813538 4721 scope.go:117] "RemoveContainer" containerID="f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.813866 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": container with ID starting with f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd not found: ID does not exist" containerID="f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.813891 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd"} err="failed to get container status \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": rpc error: code = NotFound desc = could not find container \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": container with ID starting with f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.813908 4721 scope.go:117] "RemoveContainer" containerID="d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc" Jan 30 21:46:27 crc kubenswrapper[4721]: E0130 21:46:27.814256 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": container with ID starting with d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc not found: ID does not exist" containerID="d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.814287 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc"} err="failed to get container status \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": rpc error: code = NotFound desc = could not find container \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": container with ID starting with d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.814326 4721 scope.go:117] "RemoveContainer" containerID="ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.814634 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80"} err="failed to get container status \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": rpc error: code = NotFound desc = could not find container \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": container with ID starting with ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80 not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.814657 4721 scope.go:117] "RemoveContainer" containerID="17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.814911 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99"} err="failed to get container status \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": rpc error: code = NotFound desc = could not find container \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": container with ID starting with 17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99 not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.814932 4721 scope.go:117] "RemoveContainer" containerID="f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.815228 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd"} err="failed to get container status \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": rpc error: code = NotFound desc = could not find container \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": container with ID starting with f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.815261 4721 scope.go:117] "RemoveContainer" containerID="d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.815626 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc"} err="failed to get container status \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": rpc error: code = NotFound desc = could not find container \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": container with ID starting with d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.815648 4721 scope.go:117] "RemoveContainer" containerID="ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.815925 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80"} err="failed to get container status \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": rpc error: code = NotFound desc = could not find container \"ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80\": container with ID starting with ff569cb06fdd04187c60e73c201fc732bd459efc3d26b2609df8da2e10340f80 not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.815947 4721 scope.go:117] "RemoveContainer" containerID="17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.816276 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99"} err="failed to get container status \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": rpc error: code = NotFound desc = could not find container \"17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99\": container with ID starting with 17901c0198ef21a8bbd756dd75a28489c089070351aee60e51bacbabcdf49a99 not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.816318 4721 scope.go:117] "RemoveContainer" containerID="f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.816540 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd"} err="failed to get container status \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": rpc error: code = NotFound desc = could not find container \"f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd\": container with ID starting with f25854b8c043b770e3e49dc5fa19d529b484b464668eb76efbe06270e3652bcd not found: ID does not exist" Jan 
30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.816559 4721 scope.go:117] "RemoveContainer" containerID="d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.816812 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc"} err="failed to get container status \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": rpc error: code = NotFound desc = could not find container \"d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc\": container with ID starting with d164e3108a1eea035565e42997111d0e6ddb076bef70ac558246ffd422a9d5dc not found: ID does not exist" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829468 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829514 4721 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829530 4721 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/8306e740-fd1d-459e-a0db-fc01a639f991-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829545 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s89cq\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-kube-api-access-s89cq\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829556 4721 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8306e740-fd1d-459e-a0db-fc01a639f991-config-out\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829582 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") on node \"crc\" " Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829592 4721 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8306e740-fd1d-459e-a0db-fc01a639f991-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.829602 4721 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8306e740-fd1d-459e-a0db-fc01a639f991-web-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.853100 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.853277 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0") on node "crc" Jan 30 21:46:27 crc kubenswrapper[4721]: I0130 21:46:27.931851 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.048409 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.058989 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070214 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 30 21:46:28 crc kubenswrapper[4721]: E0130 21:46:28.070789 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="init-config-reloader" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070816 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="init-config-reloader" Jan 30 21:46:28 crc kubenswrapper[4721]: E0130 21:46:28.070840 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03a2dcee-f733-4fce-a3c4-fbba29e97848" containerName="ovn-config" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070849 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="03a2dcee-f733-4fce-a3c4-fbba29e97848" containerName="ovn-config" Jan 30 21:46:28 crc kubenswrapper[4721]: E0130 21:46:28.070871 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="config-reloader" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070879 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="config-reloader" Jan 30 21:46:28 crc kubenswrapper[4721]: E0130 21:46:28.070890 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f9a600-5c9f-4334-8ee4-c4f2936594e3" containerName="mariadb-account-create-update" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070898 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f9a600-5c9f-4334-8ee4-c4f2936594e3" containerName="mariadb-account-create-update" Jan 30 21:46:28 crc kubenswrapper[4721]: E0130 21:46:28.070922 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="thanos-sidecar" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070930 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="thanos-sidecar" Jan 30 21:46:28 crc kubenswrapper[4721]: E0130 21:46:28.070949 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="prometheus" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.070957 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="prometheus" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.071172 
4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f9a600-5c9f-4334-8ee4-c4f2936594e3" containerName="mariadb-account-create-update" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.071190 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="thanos-sidecar" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.071201 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="03a2dcee-f733-4fce-a3c4-fbba29e97848" containerName="ovn-config" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.071228 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="prometheus" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.071241 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" containerName="config-reloader" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.073463 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.076130 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.076363 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.076551 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.076595 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.076820 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.077041 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-w9nwj" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.077453 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.077526 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.084525 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.086222 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.103894 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8306e740-fd1d-459e-a0db-fc01a639f991" path="/var/lib/kubelet/pods/8306e740-fd1d-459e-a0db-fc01a639f991/volumes" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242561 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndpf9\" (UniqueName: \"kubernetes.io/projected/42159633-a347-4843-9639-6e346cee733e-kube-api-access-ndpf9\") pod \"prometheus-metric-storage-0\" (UID: 
\"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242635 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242703 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242734 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242772 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242810 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242834 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-config\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242875 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242918 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/42159633-a347-4843-9639-6e346cee733e-tls-assets\") pod 
\"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.242969 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.243046 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.243092 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/42159633-a347-4843-9639-6e346cee733e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344479 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/42159633-a347-4843-9639-6e346cee733e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344557 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndpf9\" (UniqueName: \"kubernetes.io/projected/42159633-a347-4843-9639-6e346cee733e-kube-api-access-ndpf9\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344592 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344639 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344659 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344689 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344749 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-config\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344814 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/42159633-a347-4843-9639-6e346cee733e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344837 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344858 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.344911 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.345534 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.345768 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.345942 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/42159633-a347-4843-9639-6e346cee733e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.346653 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.346679 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f2942c48d709e84727a5b6f13c3b84cc2416f605a50cdd6b9533ad0654f018a6/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.350925 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.351159 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.351335 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc 
kubenswrapper[4721]: I0130 21:46:28.351813 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/42159633-a347-4843-9639-6e346cee733e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.353673 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/42159633-a347-4843-9639-6e346cee733e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.353848 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.354023 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.354517 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/42159633-a347-4843-9639-6e346cee733e-config\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.368122 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndpf9\" (UniqueName: \"kubernetes.io/projected/42159633-a347-4843-9639-6e346cee733e-kube-api-access-ndpf9\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.384760 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd7ac9a1-97f6-4704-b760-c4fc57fda3b0\") pod \"prometheus-metric-storage-0\" (UID: \"42159633-a347-4843-9639-6e346cee733e\") " pod="openstack/prometheus-metric-storage-0" Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.463494 4721 util.go:30] "No sandbox for pod can be found. 
Jan 30 21:46:28 crc kubenswrapper[4721]: I0130 21:46:28.990163 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Jan 30 21:46:29 crc kubenswrapper[4721]: W0130 21:46:29.083134 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42159633_a347_4843_9639_6e346cee733e.slice/crio-7112077c1aa545225d804e8d79ade434823489418fe0a7fd1c0e15ac4028ac34 WatchSource:0}: Error finding container 7112077c1aa545225d804e8d79ade434823489418fe0a7fd1c0e15ac4028ac34: Status 404 returned error can't find the container with id 7112077c1aa545225d804e8d79ade434823489418fe0a7fd1c0e15ac4028ac34
Jan 30 21:46:29 crc kubenswrapper[4721]: I0130 21:46:29.724345 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"42159633-a347-4843-9639-6e346cee733e","Type":"ContainerStarted","Data":"7112077c1aa545225d804e8d79ade434823489418fe0a7fd1c0e15ac4028ac34"}
Jan 30 21:46:30 crc kubenswrapper[4721]: I0130 21:46:30.095048 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e"
Jan 30 21:46:30 crc kubenswrapper[4721]: E0130 21:46:30.095541 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:46:31 crc kubenswrapper[4721]: I0130 21:46:31.744944 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"ed72319bb79813ce06ed60395ef11f743de68239747c172db4bcada9e4b5d0f1"}
Jan 30 21:46:31 crc kubenswrapper[4721]: I0130 21:46:31.745560 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"2b30d3aed8b60dce065b366a7d68bf880cd3af7d7c1e6b323e16fac2c62b595e"}
Jan 30 21:46:32 crc kubenswrapper[4721]: I0130 21:46:32.757955 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"0098342f8a7d149576cafbd332d7e02c2f1f2e184a88c4b3af8732054e7dba43"}
Jan 30 21:46:32 crc kubenswrapper[4721]: I0130 21:46:32.758606 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"fcaac06a49c7cf1c0da915b2afe7d82b7d20fa7eb655f71719092a85dd0c94ef"}
Jan 30 21:46:33 crc kubenswrapper[4721]: I0130 21:46:33.189753 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-2vp8j"]
Jan 30 21:46:33 crc kubenswrapper[4721]: I0130 21:46:33.203264 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-2vp8j"]
Jan 30 21:46:33 crc kubenswrapper[4721]: I0130 21:46:33.771236 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"42159633-a347-4843-9639-6e346cee733e","Type":"ContainerStarted","Data":"b5286ac62c3db5d09b67fdb5eaee57e167614b5b2f4a274ce641fb444c2252d1"}
Jan 30 21:46:34 crc kubenswrapper[4721]: I0130 21:46:34.103933 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12f9a600-5c9f-4334-8ee4-c4f2936594e3" path="/var/lib/kubelet/pods/12f9a600-5c9f-4334-8ee4-c4f2936594e3/volumes"
Jan 30 21:46:36 crc kubenswrapper[4721]: I0130 21:46:36.645601 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Jan 30 21:46:36 crc kubenswrapper[4721]: I0130 21:46:36.817551 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"f0acc760db967d3d73fe77fd804d79f5622f5c04cca62e574426de7cb0b7bf4f"}
Jan 30 21:46:36 crc kubenswrapper[4721]: I0130 21:46:36.817601 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"f26172d51407e94e733a29dfb543ae16acfac892dec1659bd70e5d7603e06224"}
Jan 30 21:46:36 crc kubenswrapper[4721]: I0130 21:46:36.817612 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"b7c9106662a5fd67fdc0f72b8a84bb008577ab85cffbd1edeeb143e3d2dfaba2"}
Jan 30 21:46:36 crc kubenswrapper[4721]: I0130 21:46:36.953504 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.210633 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-778cb"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.211744 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.227529 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-778cb"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.323434 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-mxcq9"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.324776 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.336105 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg4sw\" (UniqueName: \"kubernetes.io/projected/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-kube-api-access-pg4sw\") pod \"cinder-db-create-778cb\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") " pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.336182 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-operator-scripts\") pod \"cinder-db-create-778cb\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") " pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.360220 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-mxcq9"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.382373 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-4af6-account-create-update-tkzks"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.383635 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.390948 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.409264 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-4af6-account-create-update-tkzks"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.440537 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdbvs\" (UniqueName: \"kubernetes.io/projected/8583562e-347f-4aed-9977-0b02f27f3e4f-kube-api-access-tdbvs\") pod \"barbican-db-create-mxcq9\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") " pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.440604 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg4sw\" (UniqueName: \"kubernetes.io/projected/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-kube-api-access-pg4sw\") pod \"cinder-db-create-778cb\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") " pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.440653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-operator-scripts\") pod \"cinder-db-create-778cb\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") " pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.440766 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8583562e-347f-4aed-9977-0b02f27f3e4f-operator-scripts\") pod \"barbican-db-create-mxcq9\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") " pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.441821 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-operator-scripts\") pod \"cinder-db-create-778cb\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") " pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.446273 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-bxvhs"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.447514 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.471258 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-bxvhs"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.540533 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg4sw\" (UniqueName: \"kubernetes.io/projected/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-kube-api-access-pg4sw\") pod \"cinder-db-create-778cb\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") " pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.542418 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8583562e-347f-4aed-9977-0b02f27f3e4f-operator-scripts\") pod \"barbican-db-create-mxcq9\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") " pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.542471 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdbvs\" (UniqueName: \"kubernetes.io/projected/8583562e-347f-4aed-9977-0b02f27f3e4f-kube-api-access-tdbvs\") pod \"barbican-db-create-mxcq9\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") " pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.542595 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23a03053-c813-4fd0-b38b-f30f2e40a0cf-operator-scripts\") pod \"barbican-4af6-account-create-update-tkzks\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") " pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.542625 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmmcd\" (UniqueName: \"kubernetes.io/projected/23a03053-c813-4fd0-b38b-f30f2e40a0cf-kube-api-access-mmmcd\") pod \"barbican-4af6-account-create-update-tkzks\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") " pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.543914 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-ffrrr"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.545261 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.546474 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8583562e-347f-4aed-9977-0b02f27f3e4f-operator-scripts\") pod \"barbican-db-create-mxcq9\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") " pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.549531 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qfzgk"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.549644 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.549932 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.552633 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.572656 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ffrrr"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.590288 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdbvs\" (UniqueName: \"kubernetes.io/projected/8583562e-347f-4aed-9977-0b02f27f3e4f-kube-api-access-tdbvs\") pod \"barbican-db-create-mxcq9\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") " pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.629546 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-zw7sq"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.631326 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.644582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23a03053-c813-4fd0-b38b-f30f2e40a0cf-operator-scripts\") pod \"barbican-4af6-account-create-update-tkzks\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") " pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.644653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmmcd\" (UniqueName: \"kubernetes.io/projected/23a03053-c813-4fd0-b38b-f30f2e40a0cf-kube-api-access-mmmcd\") pod \"barbican-4af6-account-create-update-tkzks\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") " pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.644798 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c66a91-2e9d-4de3-b97c-726ef7ff501d-operator-scripts\") pod \"cloudkitty-db-create-bxvhs\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") " pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.644870 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rcbf\" (UniqueName: \"kubernetes.io/projected/69c66a91-2e9d-4de3-b97c-726ef7ff501d-kube-api-access-2rcbf\") pod \"cloudkitty-db-create-bxvhs\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") " pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.658884 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-f0bb-account-create-update-k7wz8"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.660052 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f0bb-account-create-update-k7wz8"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.667608 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.697880 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f0bb-account-create-update-k7wz8"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rcbf\" (UniqueName: \"kubernetes.io/projected/69c66a91-2e9d-4de3-b97c-726ef7ff501d-kube-api-access-2rcbf\") pod \"cloudkitty-db-create-bxvhs\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") " pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746130 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-config-data\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx8hd\" (UniqueName: \"kubernetes.io/projected/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-kube-api-access-tx8hd\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746239 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4553135b-6050-4c65-8f3f-e20a998bb7b0-operator-scripts\") pod \"neutron-db-create-zw7sq\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") " pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746271 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99wr6\" (UniqueName: \"kubernetes.io/projected/4553135b-6050-4c65-8f3f-e20a998bb7b0-kube-api-access-99wr6\") pod \"neutron-db-create-zw7sq\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") " pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746315 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-combined-ca-bundle\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.746351 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c66a91-2e9d-4de3-b97c-726ef7ff501d-operator-scripts\") pod \"cloudkitty-db-create-bxvhs\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") " pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.762554 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-zw7sq"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.772398 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-2796-account-create-update-jbzxh"]
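The "SyncLoop ADD/UPDATE" entries with source="api" are the kubelet reacting to its pod watch against the API server. The same event stream can be observed from outside with a client-go informer; a sketch, assuming a reachable kubeconfig at the default location:

```go
// Watch the pod ADD/UPDATE/DELETE stream for the openstack namespace,
// the same stream that drives the kubelet "SyncLoop" entries above.
package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	clientset := kubernetes.NewForConfigOrDie(cfg)
	factory := informers.NewSharedInformerFactoryWithOptions(
		clientset, 30*time.Second, informers.WithNamespace("openstack"))
	factory.Core().V1().Pods().Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc:    func(obj interface{}) { fmt.Println("ADD", obj.(*corev1.Pod).Name) },
		UpdateFunc: func(_, obj interface{}) { fmt.Println("UPDATE", obj.(*corev1.Pod).Name) },
		DeleteFunc: func(obj interface{}) { fmt.Println("DELETE") }, // obj may be a tombstone
	})
	stop := make(chan struct{})
	factory.Start(stop)
	factory.WaitForCacheSync(stop)
	select {} // run until killed
}
```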
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.773593 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2796-account-create-update-jbzxh"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.777512 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.779863 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmmcd\" (UniqueName: \"kubernetes.io/projected/23a03053-c813-4fd0-b38b-f30f2e40a0cf-kube-api-access-mmmcd\") pod \"barbican-4af6-account-create-update-tkzks\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") " pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.794525 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-2796-account-create-update-jbzxh"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.808869 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23a03053-c813-4fd0-b38b-f30f2e40a0cf-operator-scripts\") pod \"barbican-4af6-account-create-update-tkzks\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") " pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.822664 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c66a91-2e9d-4de3-b97c-726ef7ff501d-operator-scripts\") pod \"cloudkitty-db-create-bxvhs\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") " pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.831507 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.838425 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.841961 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rcbf\" (UniqueName: \"kubernetes.io/projected/69c66a91-2e9d-4de3-b97c-726ef7ff501d-kube-api-access-2rcbf\") pod \"cloudkitty-db-create-bxvhs\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") " pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.847738 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848111 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-config-data\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx8hd\" (UniqueName: \"kubernetes.io/projected/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-kube-api-access-tx8hd\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848244 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4553135b-6050-4c65-8f3f-e20a998bb7b0-operator-scripts\") pod \"neutron-db-create-zw7sq\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") " pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848283 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99wr6\" (UniqueName: \"kubernetes.io/projected/4553135b-6050-4c65-8f3f-e20a998bb7b0-kube-api-access-99wr6\") pod \"neutron-db-create-zw7sq\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") " pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848336 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stltp\" (UniqueName: \"kubernetes.io/projected/2badf082-9873-424a-976c-4b9fde4bf13a-kube-api-access-stltp\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848359 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.848392 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-combined-ca-bundle\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.851900 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b8f4-account-create-update-gthdn"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.853588 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8f4-account-create-update-gthdn"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.857103 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.860593 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4553135b-6050-4c65-8f3f-e20a998bb7b0-operator-scripts\") pod \"neutron-db-create-zw7sq\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") " pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.866738 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-combined-ca-bundle\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.868064 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-config-data\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.872945 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.874447 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b8f4-account-create-update-gthdn"]
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.903088 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99wr6\" (UniqueName: \"kubernetes.io/projected/4553135b-6050-4c65-8f3f-e20a998bb7b0-kube-api-access-99wr6\") pod \"neutron-db-create-zw7sq\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") " pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.917967 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx8hd\" (UniqueName: \"kubernetes.io/projected/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-kube-api-access-tx8hd\") pod \"keystone-db-sync-ffrrr\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " pod="openstack/keystone-db-sync-ffrrr"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.961641 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv8m2\" (UniqueName: \"kubernetes.io/projected/442a6735-5080-4fb6-89c0-57bcd08015a6-kube-api-access-dv8m2\") pod \"cloudkitty-2796-account-create-update-jbzxh\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") " pod="openstack/cloudkitty-2796-account-create-update-jbzxh"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.961715 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stltp\" (UniqueName: \"kubernetes.io/projected/2badf082-9873-424a-976c-4b9fde4bf13a-kube-api-access-stltp\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8"
Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.961750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8"
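Every kubelet entry above follows the klog header format: a level letter (I/W/E) fused with MMDD, then wall-clock time, PID, and file:line, followed by a quoted message and key="value" pairs emitted by structured logging. A minimal sketch that produces the same shape with k8s.io/klog/v2:

```go
// Emit a log line in the same shape as the kubelet entries above.
// klog.InfoS writes the Ihhmm-style header plus a quoted message and
// key="value" pairs to stderr.
package main

import (
	"flag"

	"k8s.io/klog/v2"
)

func main() {
	klog.InitFlags(nil)
	flag.Parse()
	defer klog.Flush()

	klog.InfoS("MountVolume.SetUp succeeded for volume \"operator-scripts\"",
		"pod", "openstack/neutron-db-create-zw7sq")
	// stderr, roughly:
	// I0130 21:46:37.860593  4721 main.go:17] "MountVolume.SetUp succeeded for volume \"operator-scripts\"" pod="openstack/neutron-db-create-zw7sq"
}
```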
\"kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8" Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.961844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz8kb\" (UniqueName: \"kubernetes.io/projected/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-kube-api-access-wz8kb\") pod \"neutron-b8f4-account-create-update-gthdn\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") " pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.961899 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-operator-scripts\") pod \"neutron-b8f4-account-create-update-gthdn\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") " pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.962046 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/442a6735-5080-4fb6-89c0-57bcd08015a6-operator-scripts\") pod \"cloudkitty-2796-account-create-update-jbzxh\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") " pod="openstack/cloudkitty-2796-account-create-update-jbzxh" Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.965108 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8" Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.966574 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-zw7sq" Jan 30 21:46:37 crc kubenswrapper[4721]: I0130 21:46:37.982076 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stltp\" (UniqueName: \"kubernetes.io/projected/2badf082-9873-424a-976c-4b9fde4bf13a-kube-api-access-stltp\") pod \"cinder-f0bb-account-create-update-k7wz8\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") " pod="openstack/cinder-f0bb-account-create-update-k7wz8" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.064970 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/442a6735-5080-4fb6-89c0-57bcd08015a6-operator-scripts\") pod \"cloudkitty-2796-account-create-update-jbzxh\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") " pod="openstack/cloudkitty-2796-account-create-update-jbzxh" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.065039 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv8m2\" (UniqueName: \"kubernetes.io/projected/442a6735-5080-4fb6-89c0-57bcd08015a6-kube-api-access-dv8m2\") pod \"cloudkitty-2796-account-create-update-jbzxh\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") " pod="openstack/cloudkitty-2796-account-create-update-jbzxh" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.065093 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz8kb\" (UniqueName: \"kubernetes.io/projected/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-kube-api-access-wz8kb\") pod \"neutron-b8f4-account-create-update-gthdn\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") " pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.065117 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-operator-scripts\") pod \"neutron-b8f4-account-create-update-gthdn\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") " pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.065816 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-operator-scripts\") pod \"neutron-b8f4-account-create-update-gthdn\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") " pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.066610 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/442a6735-5080-4fb6-89c0-57bcd08015a6-operator-scripts\") pod \"cloudkitty-2796-account-create-update-jbzxh\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") " pod="openstack/cloudkitty-2796-account-create-update-jbzxh" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.087940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz8kb\" (UniqueName: \"kubernetes.io/projected/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-kube-api-access-wz8kb\") pod \"neutron-b8f4-account-create-update-gthdn\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") " pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.090691 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-f0bb-account-create-update-k7wz8" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.092159 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv8m2\" (UniqueName: \"kubernetes.io/projected/442a6735-5080-4fb6-89c0-57bcd08015a6-kube-api-access-dv8m2\") pod \"cloudkitty-2796-account-create-update-jbzxh\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") " pod="openstack/cloudkitty-2796-account-create-update-jbzxh" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.130492 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2796-account-create-update-jbzxh" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.182656 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ffrrr" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.230150 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-2cvzt"] Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.234482 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.248424 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.280817 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-2cvzt"] Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.305312 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8f4-account-create-update-gthdn" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.372922 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn2l7\" (UniqueName: \"kubernetes.io/projected/56e592b8-6959-4d00-94fa-fcab154f8615-kube-api-access-rn2l7\") pod \"root-account-create-update-2cvzt\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.373061 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e592b8-6959-4d00-94fa-fcab154f8615-operator-scripts\") pod \"root-account-create-update-2cvzt\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.444746 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-mxcq9"] Jan 30 21:46:38 crc kubenswrapper[4721]: W0130 21:46:38.455665 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8583562e_347f_4aed_9977_0b02f27f3e4f.slice/crio-642161e639d5c268d4059c9866a40e7883fe7e6c534b1d8d55c02141a31a20c1 WatchSource:0}: Error finding container 642161e639d5c268d4059c9866a40e7883fe7e6c534b1d8d55c02141a31a20c1: Status 404 returned error can't find the container with id 642161e639d5c268d4059c9866a40e7883fe7e6c534b1d8d55c02141a31a20c1 Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.475767 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
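The W ... manager.go:1169 warnings are cAdvisor racing container creation: it sees the new crio-<id> cgroup appear before CRI-O can answer for that ID, gets a 404, and picks the container up on a later event, so during a burst of pod starts they are typically transient noise. A small triage sketch (a hypothetical helper, not part of the kubelet) that counts these warnings per container ID from a log fed on stdin:

```go
// Count the transient "can't find the container" 404 warnings per
// container ID in a kubelet log read from stdin.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	idRe := regexp.MustCompile(`container with id ([0-9a-f]{64})`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet log lines are long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, "Failed to process watch event") {
			continue
		}
		if m := idRe.FindStringSubmatch(line); m != nil {
			counts[m[1]]++
		}
	}
	for id, n := range counts {
		fmt.Printf("%d  %s\n", n, id[:12])
	}
}
```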
\"kubernetes.io/configmap/56e592b8-6959-4d00-94fa-fcab154f8615-operator-scripts\") pod \"root-account-create-update-2cvzt\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.475913 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn2l7\" (UniqueName: \"kubernetes.io/projected/56e592b8-6959-4d00-94fa-fcab154f8615-kube-api-access-rn2l7\") pod \"root-account-create-update-2cvzt\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.477833 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e592b8-6959-4d00-94fa-fcab154f8615-operator-scripts\") pod \"root-account-create-update-2cvzt\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.500282 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn2l7\" (UniqueName: \"kubernetes.io/projected/56e592b8-6959-4d00-94fa-fcab154f8615-kube-api-access-rn2l7\") pod \"root-account-create-update-2cvzt\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.592441 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-2cvzt" Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.728917 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-bxvhs"] Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.848348 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-bxvhs" event={"ID":"69c66a91-2e9d-4de3-b97c-726ef7ff501d","Type":"ContainerStarted","Data":"64b2133227051f8eeb0c5dee6b5b20cdaa0c7433492aac2dde0a9f173889c48a"} Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.854442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mxcq9" event={"ID":"8583562e-347f-4aed-9977-0b02f27f3e4f","Type":"ContainerStarted","Data":"642161e639d5c268d4059c9866a40e7883fe7e6c534b1d8d55c02141a31a20c1"} Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.863542 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"548d1f3da77bb0a928d87caf2472cdb5351871075aad680205cb0b3b10e4c76d"} Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.872654 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-778cb"] Jan 30 21:46:38 crc kubenswrapper[4721]: W0130 21:46:38.879103 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4553135b_6050_4c65_8f3f_e20a998bb7b0.slice/crio-a9e2352030f2518ec4f8afefefd81303d1f79c56ab50ccaabe7794c625686384 WatchSource:0}: Error finding container a9e2352030f2518ec4f8afefefd81303d1f79c56ab50ccaabe7794c625686384: Status 404 returned error can't find the container with id a9e2352030f2518ec4f8afefefd81303d1f79c56ab50ccaabe7794c625686384 Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.903009 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/neutron-db-create-zw7sq"] Jan 30 21:46:38 crc kubenswrapper[4721]: I0130 21:46:38.932527 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-4af6-account-create-update-tkzks"] Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.080676 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f0bb-account-create-update-k7wz8"] Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.091684 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ffrrr"] Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.106078 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-2796-account-create-update-jbzxh"] Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.215756 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b8f4-account-create-update-gthdn"] Jan 30 21:46:39 crc kubenswrapper[4721]: W0130 21:46:39.230267 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3e608c6_6350_4402_a53b_e0e5c55ae5b8.slice/crio-0e7a7e116e4ca1277b540860dc36e977d6526a444eee83025f26b69dc36e8c19 WatchSource:0}: Error finding container 0e7a7e116e4ca1277b540860dc36e977d6526a444eee83025f26b69dc36e8c19: Status 404 returned error can't find the container with id 0e7a7e116e4ca1277b540860dc36e977d6526a444eee83025f26b69dc36e8c19 Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.324525 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-2cvzt"] Jan 30 21:46:39 crc kubenswrapper[4721]: W0130 21:46:39.373269 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56e592b8_6959_4d00_94fa_fcab154f8615.slice/crio-2bc19e827ca216d15eececda1ba4f5703ef53a04be047fde8bf2fb0c07fd1c67 WatchSource:0}: Error finding container 2bc19e827ca216d15eececda1ba4f5703ef53a04be047fde8bf2fb0c07fd1c67: Status 404 returned error can't find the container with id 2bc19e827ca216d15eececda1ba4f5703ef53a04be047fde8bf2fb0c07fd1c67 Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.880226 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4af6-account-create-update-tkzks" event={"ID":"23a03053-c813-4fd0-b38b-f30f2e40a0cf","Type":"ContainerStarted","Data":"89dfe56baa8a440d7c8c9d95c2a90eb2a5e0d2e6bed0bdb948854f1d536b1997"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.880312 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4af6-account-create-update-tkzks" event={"ID":"23a03053-c813-4fd0-b38b-f30f2e40a0cf","Type":"ContainerStarted","Data":"9bffaadcc04dcd0a2d8d8c562d1765a6593bfc5b9c9bd59529c6ac130b5f38ca"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.894675 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zw7sq" event={"ID":"4553135b-6050-4c65-8f3f-e20a998bb7b0","Type":"ContainerStarted","Data":"a1e784bb9b7553afc8baa6063cf52587a1e8731ba6391c7ceb670564962bb283"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.894721 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zw7sq" event={"ID":"4553135b-6050-4c65-8f3f-e20a998bb7b0","Type":"ContainerStarted","Data":"a9e2352030f2518ec4f8afefefd81303d1f79c56ab50ccaabe7794c625686384"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.897833 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-sync-ffrrr" event={"ID":"befb6e6d-91a5-46af-9c9d-59688cfbb6ec","Type":"ContainerStarted","Data":"a8531ac8d26ac46ee52b885e115381853a22626bdbf9c128e02bf05059ecaa67"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.906132 4721 generic.go:334] "Generic (PLEG): container finished" podID="8583562e-347f-4aed-9977-0b02f27f3e4f" containerID="767b137197c46c50a6c4af9e2c32e06aefbbaf90714022efb2f07324a3162df8" exitCode=0 Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.906221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mxcq9" event={"ID":"8583562e-347f-4aed-9977-0b02f27f3e4f","Type":"ContainerDied","Data":"767b137197c46c50a6c4af9e2c32e06aefbbaf90714022efb2f07324a3162df8"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.909331 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-4af6-account-create-update-tkzks" podStartSLOduration=2.909319149 podStartE2EDuration="2.909319149s" podCreationTimestamp="2026-01-30 21:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:39.902861669 +0000 UTC m=+1788.694762915" watchObservedRunningTime="2026-01-30 21:46:39.909319149 +0000 UTC m=+1788.701220395" Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.909980 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-778cb" event={"ID":"6bf51132-1e9b-4b5e-bd24-c7290cebd23c","Type":"ContainerStarted","Data":"ff989b28d4328c3a73759e5a9853666df8367ae66b5a8eb72c5700f4068a8fc7"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.910028 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-778cb" event={"ID":"6bf51132-1e9b-4b5e-bd24-c7290cebd23c","Type":"ContainerStarted","Data":"f808c9da5b290cabaab5a9297e8a7784eb445b232a7b71c1bbc97107dd8d6588"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.910947 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2cvzt" event={"ID":"56e592b8-6959-4d00-94fa-fcab154f8615","Type":"ContainerStarted","Data":"39d4fbba2d60a4a44120b64620a6fdfa146b901508140eefee5b57800d9b08a7"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.910975 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2cvzt" event={"ID":"56e592b8-6959-4d00-94fa-fcab154f8615","Type":"ContainerStarted","Data":"2bc19e827ca216d15eececda1ba4f5703ef53a04be047fde8bf2fb0c07fd1c67"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.914495 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8f4-account-create-update-gthdn" event={"ID":"b3e608c6-6350-4402-a53b-e0e5c55ae5b8","Type":"ContainerStarted","Data":"2d4f8defd27885510400e99948248ff9ce180dca6370a421238fd9f4460d783a"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.914527 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8f4-account-create-update-gthdn" event={"ID":"b3e608c6-6350-4402-a53b-e0e5c55ae5b8","Type":"ContainerStarted","Data":"0e7a7e116e4ca1277b540860dc36e977d6526a444eee83025f26b69dc36e8c19"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.923940 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2796-account-create-update-jbzxh" 
event={"ID":"442a6735-5080-4fb6-89c0-57bcd08015a6","Type":"ContainerStarted","Data":"d8bb6d6ee3784ff7ea78566cbd1e939b94cc17105ffd71b68bed607eb538aff1"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.923989 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2796-account-create-update-jbzxh" event={"ID":"442a6735-5080-4fb6-89c0-57bcd08015a6","Type":"ContainerStarted","Data":"6f9759f17bc6a44782a0f8d257625e94a25b76a98d76171abd03eb583c30cc2f"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.937395 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-bxvhs" event={"ID":"69c66a91-2e9d-4de3-b97c-726ef7ff501d","Type":"ContainerStarted","Data":"7000253c33e1b67e7dcde6d92e872c7bd738a2a8fe2c2fd61ee7b0c8c41544a7"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.938699 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f0bb-account-create-update-k7wz8" event={"ID":"2badf082-9873-424a-976c-4b9fde4bf13a","Type":"ContainerStarted","Data":"c3191038524903b8c891f7b94fd86beb1ae608fab0f65ad79bdaa9aa73456193"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.938737 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f0bb-account-create-update-k7wz8" event={"ID":"2badf082-9873-424a-976c-4b9fde4bf13a","Type":"ContainerStarted","Data":"860060b9444f40156dc6f8d078d60a876101a990d612971165b34e682c8316a3"} Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.939165 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-zw7sq" podStartSLOduration=2.9391531520000003 podStartE2EDuration="2.939153152s" podCreationTimestamp="2026-01-30 21:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:39.925003534 +0000 UTC m=+1788.716904780" watchObservedRunningTime="2026-01-30 21:46:39.939153152 +0000 UTC m=+1788.731054398" Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.972529 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-2cvzt" podStartSLOduration=1.9725088849999999 podStartE2EDuration="1.972508885s" podCreationTimestamp="2026-01-30 21:46:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:39.965168148 +0000 UTC m=+1788.757069394" watchObservedRunningTime="2026-01-30 21:46:39.972508885 +0000 UTC m=+1788.764410131" Jan 30 21:46:39 crc kubenswrapper[4721]: I0130 21:46:39.998427 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-2796-account-create-update-jbzxh" podStartSLOduration=2.998404746 podStartE2EDuration="2.998404746s" podCreationTimestamp="2026-01-30 21:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:39.996159007 +0000 UTC m=+1788.788060253" watchObservedRunningTime="2026-01-30 21:46:39.998404746 +0000 UTC m=+1788.790305992" Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.017030 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-b8f4-account-create-update-gthdn" podStartSLOduration=3.017011872 podStartE2EDuration="3.017011872s" podCreationTimestamp="2026-01-30 21:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:40.012043168 +0000 UTC m=+1788.803944424" watchObservedRunningTime="2026-01-30 21:46:40.017011872 +0000 UTC m=+1788.808913118" Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.064176 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-f0bb-account-create-update-k7wz8" podStartSLOduration=3.064150221 podStartE2EDuration="3.064150221s" podCreationTimestamp="2026-01-30 21:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:46:40.050006213 +0000 UTC m=+1788.841907459" watchObservedRunningTime="2026-01-30 21:46:40.064150221 +0000 UTC m=+1788.856051467" Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.951457 4721 generic.go:334] "Generic (PLEG): container finished" podID="4553135b-6050-4c65-8f3f-e20a998bb7b0" containerID="a1e784bb9b7553afc8baa6063cf52587a1e8731ba6391c7ceb670564962bb283" exitCode=0 Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.951542 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zw7sq" event={"ID":"4553135b-6050-4c65-8f3f-e20a998bb7b0","Type":"ContainerDied","Data":"a1e784bb9b7553afc8baa6063cf52587a1e8731ba6391c7ceb670564962bb283"} Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.953666 4721 generic.go:334] "Generic (PLEG): container finished" podID="42159633-a347-4843-9639-6e346cee733e" containerID="b5286ac62c3db5d09b67fdb5eaee57e167614b5b2f4a274ce641fb444c2252d1" exitCode=0 Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.953769 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"42159633-a347-4843-9639-6e346cee733e","Type":"ContainerDied","Data":"b5286ac62c3db5d09b67fdb5eaee57e167614b5b2f4a274ce641fb444c2252d1"} Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.959276 4721 generic.go:334] "Generic (PLEG): container finished" podID="6bf51132-1e9b-4b5e-bd24-c7290cebd23c" containerID="ff989b28d4328c3a73759e5a9853666df8367ae66b5a8eb72c5700f4068a8fc7" exitCode=0 Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.959376 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-778cb" event={"ID":"6bf51132-1e9b-4b5e-bd24-c7290cebd23c","Type":"ContainerDied","Data":"ff989b28d4328c3a73759e5a9853666df8367ae66b5a8eb72c5700f4068a8fc7"} Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.961007 4721 generic.go:334] "Generic (PLEG): container finished" podID="69c66a91-2e9d-4de3-b97c-726ef7ff501d" containerID="7000253c33e1b67e7dcde6d92e872c7bd738a2a8fe2c2fd61ee7b0c8c41544a7" exitCode=0 Jan 30 21:46:40 crc kubenswrapper[4721]: I0130 21:46:40.962173 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-bxvhs" event={"ID":"69c66a91-2e9d-4de3-b97c-726ef7ff501d","Type":"ContainerDied","Data":"7000253c33e1b67e7dcde6d92e872c7bd738a2a8fe2c2fd61ee7b0c8c41544a7"} Jan 30 21:46:41 crc kubenswrapper[4721]: I0130 21:46:41.974039 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"42159633-a347-4843-9639-6e346cee733e","Type":"ContainerStarted","Data":"e8625a7832f5a802b6fba8b0e5dec826cf18ed1d818456eacbe22e94c03d2364"} Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.902644 4721 util.go:48] "No ready sandbox for pod can be found. 
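The pod_startup_latency_tracker entries record podStartSLOduration, the pod's creation-to-running latency with image-pull time excluded (here the pull timestamps are zero-valued, so the SLO and E2E durations match). A sketch that extracts and ranks those values from a log fed on stdin:

```go
// Pull podStartSLOduration values out of pod_startup_latency_tracker
// entries like the ones above and print the slowest pods first.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
	"strconv"
)

func main() {
	re := regexp.MustCompile(`pod="([^"]+)" podStartSLOduration=([0-9.]+)`)
	type rec struct {
		pod string
		sec float64
	}
	var recs []rec
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet log lines are long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			v, _ := strconv.ParseFloat(m[2], 64)
			recs = append(recs, rec{m[1], v})
		}
	}
	sort.Slice(recs, func(i, j int) bool { return recs[i].sec > recs[j].sec })
	for _, r := range recs {
		fmt.Printf("%8.3fs  %s\n", r.sec, r.pod)
	}
}
```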
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.910026 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-778cb"
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.928818 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mxcq9"
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.952765 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984406 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4553135b-6050-4c65-8f3f-e20a998bb7b0-operator-scripts\") pod \"4553135b-6050-4c65-8f3f-e20a998bb7b0\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984621 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99wr6\" (UniqueName: \"kubernetes.io/projected/4553135b-6050-4c65-8f3f-e20a998bb7b0-kube-api-access-99wr6\") pod \"4553135b-6050-4c65-8f3f-e20a998bb7b0\" (UID: \"4553135b-6050-4c65-8f3f-e20a998bb7b0\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984694 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-operator-scripts\") pod \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984790 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg4sw\" (UniqueName: \"kubernetes.io/projected/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-kube-api-access-pg4sw\") pod \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\" (UID: \"6bf51132-1e9b-4b5e-bd24-c7290cebd23c\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984796 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4553135b-6050-4c65-8f3f-e20a998bb7b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4553135b-6050-4c65-8f3f-e20a998bb7b0" (UID: "4553135b-6050-4c65-8f3f-e20a998bb7b0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984882 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c66a91-2e9d-4de3-b97c-726ef7ff501d-operator-scripts\") pod \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.984923 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8583562e-347f-4aed-9977-0b02f27f3e4f-operator-scripts\") pod \"8583562e-347f-4aed-9977-0b02f27f3e4f\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.985053 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rcbf\" (UniqueName: \"kubernetes.io/projected/69c66a91-2e9d-4de3-b97c-726ef7ff501d-kube-api-access-2rcbf\") pod \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\" (UID: \"69c66a91-2e9d-4de3-b97c-726ef7ff501d\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.985105 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdbvs\" (UniqueName: \"kubernetes.io/projected/8583562e-347f-4aed-9977-0b02f27f3e4f-kube-api-access-tdbvs\") pod \"8583562e-347f-4aed-9977-0b02f27f3e4f\" (UID: \"8583562e-347f-4aed-9977-0b02f27f3e4f\") "
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.985284 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6bf51132-1e9b-4b5e-bd24-c7290cebd23c" (UID: "6bf51132-1e9b-4b5e-bd24-c7290cebd23c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.985532 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69c66a91-2e9d-4de3-b97c-726ef7ff501d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69c66a91-2e9d-4de3-b97c-726ef7ff501d" (UID: "69c66a91-2e9d-4de3-b97c-726ef7ff501d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.985902 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8583562e-347f-4aed-9977-0b02f27f3e4f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8583562e-347f-4aed-9977-0b02f27f3e4f" (UID: "8583562e-347f-4aed-9977-0b02f27f3e4f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.989636 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69c66a91-2e9d-4de3-b97c-726ef7ff501d-kube-api-access-2rcbf" (OuterVolumeSpecName: "kube-api-access-2rcbf") pod "69c66a91-2e9d-4de3-b97c-726ef7ff501d" (UID: "69c66a91-2e9d-4de3-b97c-726ef7ff501d"). InnerVolumeSpecName "kube-api-access-2rcbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.989777 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4553135b-6050-4c65-8f3f-e20a998bb7b0-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.989835 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.989858 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69c66a91-2e9d-4de3-b97c-726ef7ff501d-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:42 crc kubenswrapper[4721]: I0130 21:46:42.989878 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8583562e-347f-4aed-9977-0b02f27f3e4f-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.000203 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-bxvhs" event={"ID":"69c66a91-2e9d-4de3-b97c-726ef7ff501d","Type":"ContainerDied","Data":"64b2133227051f8eeb0c5dee6b5b20cdaa0c7433492aac2dde0a9f173889c48a"}
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.000416 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64b2133227051f8eeb0c5dee6b5b20cdaa0c7433492aac2dde0a9f173889c48a"
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.000611 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-bxvhs"
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.002837 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zw7sq"
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.003073 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zw7sq" event={"ID":"4553135b-6050-4c65-8f3f-e20a998bb7b0","Type":"ContainerDied","Data":"a9e2352030f2518ec4f8afefefd81303d1f79c56ab50ccaabe7794c625686384"}
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.003529 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9e2352030f2518ec4f8afefefd81303d1f79c56ab50ccaabe7794c625686384"
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.005540 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mxcq9" event={"ID":"8583562e-347f-4aed-9977-0b02f27f3e4f","Type":"ContainerDied","Data":"642161e639d5c268d4059c9866a40e7883fe7e6c534b1d8d55c02141a31a20c1"}
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.005581 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="642161e639d5c268d4059c9866a40e7883fe7e6c534b1d8d55c02141a31a20c1"
Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.006009 4721 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/barbican-db-create-mxcq9" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.009682 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-778cb" event={"ID":"6bf51132-1e9b-4b5e-bd24-c7290cebd23c","Type":"ContainerDied","Data":"f808c9da5b290cabaab5a9297e8a7784eb445b232a7b71c1bbc97107dd8d6588"} Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.009724 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f808c9da5b290cabaab5a9297e8a7784eb445b232a7b71c1bbc97107dd8d6588" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.009724 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-778cb" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.011805 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4553135b-6050-4c65-8f3f-e20a998bb7b0-kube-api-access-99wr6" (OuterVolumeSpecName: "kube-api-access-99wr6") pod "4553135b-6050-4c65-8f3f-e20a998bb7b0" (UID: "4553135b-6050-4c65-8f3f-e20a998bb7b0"). InnerVolumeSpecName "kube-api-access-99wr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.012923 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-kube-api-access-pg4sw" (OuterVolumeSpecName: "kube-api-access-pg4sw") pod "6bf51132-1e9b-4b5e-bd24-c7290cebd23c" (UID: "6bf51132-1e9b-4b5e-bd24-c7290cebd23c"). InnerVolumeSpecName "kube-api-access-pg4sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.013220 4721 generic.go:334] "Generic (PLEG): container finished" podID="56e592b8-6959-4d00-94fa-fcab154f8615" containerID="39d4fbba2d60a4a44120b64620a6fdfa146b901508140eefee5b57800d9b08a7" exitCode=0 Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.013268 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2cvzt" event={"ID":"56e592b8-6959-4d00-94fa-fcab154f8615","Type":"ContainerDied","Data":"39d4fbba2d60a4a44120b64620a6fdfa146b901508140eefee5b57800d9b08a7"} Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.015977 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8583562e-347f-4aed-9977-0b02f27f3e4f-kube-api-access-tdbvs" (OuterVolumeSpecName: "kube-api-access-tdbvs") pod "8583562e-347f-4aed-9977-0b02f27f3e4f" (UID: "8583562e-347f-4aed-9977-0b02f27f3e4f"). InnerVolumeSpecName "kube-api-access-tdbvs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.092092 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rcbf\" (UniqueName: \"kubernetes.io/projected/69c66a91-2e9d-4de3-b97c-726ef7ff501d-kube-api-access-2rcbf\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.092131 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdbvs\" (UniqueName: \"kubernetes.io/projected/8583562e-347f-4aed-9977-0b02f27f3e4f-kube-api-access-tdbvs\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.092143 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99wr6\" (UniqueName: \"kubernetes.io/projected/4553135b-6050-4c65-8f3f-e20a998bb7b0-kube-api-access-99wr6\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.092158 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg4sw\" (UniqueName: \"kubernetes.io/projected/6bf51132-1e9b-4b5e-bd24-c7290cebd23c-kube-api-access-pg4sw\") on node \"crc\" DevicePath \"\"" Jan 30 21:46:43 crc kubenswrapper[4721]: I0130 21:46:43.092765 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:46:43 crc kubenswrapper[4721]: E0130 21:46:43.093061 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:46:44 crc kubenswrapper[4721]: I0130 21:46:44.025169 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"738e980fa6bc202aa4f125813992e6f0e2933b490046d73f2cf807f1da78fb6a"} Jan 30 21:46:51 crc kubenswrapper[4721]: I0130 21:46:51.093722 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"42159633-a347-4843-9639-6e346cee733e","Type":"ContainerStarted","Data":"1b69ab68ee8d373c0b4096208a00608c93d93522192ac3b7ce8e5bf3c3a56265"} Jan 30 21:46:55 crc kubenswrapper[4721]: I0130 21:46:55.092099 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:46:55 crc kubenswrapper[4721]: E0130 21:46:55.093219 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:47:06 crc kubenswrapper[4721]: I0130 21:47:06.093985 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.036808 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-2cvzt" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.078584 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e592b8-6959-4d00-94fa-fcab154f8615-operator-scripts\") pod \"56e592b8-6959-4d00-94fa-fcab154f8615\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.079613 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e592b8-6959-4d00-94fa-fcab154f8615-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "56e592b8-6959-4d00-94fa-fcab154f8615" (UID: "56e592b8-6959-4d00-94fa-fcab154f8615"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.180246 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn2l7\" (UniqueName: \"kubernetes.io/projected/56e592b8-6959-4d00-94fa-fcab154f8615-kube-api-access-rn2l7\") pod \"56e592b8-6959-4d00-94fa-fcab154f8615\" (UID: \"56e592b8-6959-4d00-94fa-fcab154f8615\") " Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.180682 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e592b8-6959-4d00-94fa-fcab154f8615-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.186271 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e592b8-6959-4d00-94fa-fcab154f8615-kube-api-access-rn2l7" (OuterVolumeSpecName: "kube-api-access-rn2l7") pod "56e592b8-6959-4d00-94fa-fcab154f8615" (UID: "56e592b8-6959-4d00-94fa-fcab154f8615"). InnerVolumeSpecName "kube-api-access-rn2l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.307955 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn2l7\" (UniqueName: \"kubernetes.io/projected/56e592b8-6959-4d00-94fa-fcab154f8615-kube-api-access-rn2l7\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.359555 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-2cvzt" event={"ID":"56e592b8-6959-4d00-94fa-fcab154f8615","Type":"ContainerDied","Data":"2bc19e827ca216d15eececda1ba4f5703ef53a04be047fde8bf2fb0c07fd1c67"} Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.359628 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-2cvzt" Jan 30 21:47:09 crc kubenswrapper[4721]: I0130 21:47:09.359637 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bc19e827ca216d15eececda1ba4f5703ef53a04be047fde8bf2fb0c07fd1c67" Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.385939 4721 generic.go:334] "Generic (PLEG): container finished" podID="23a03053-c813-4fd0-b38b-f30f2e40a0cf" containerID="89dfe56baa8a440d7c8c9d95c2a90eb2a5e0d2e6bed0bdb948854f1d536b1997" exitCode=0 Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.386018 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4af6-account-create-update-tkzks" event={"ID":"23a03053-c813-4fd0-b38b-f30f2e40a0cf","Type":"ContainerDied","Data":"89dfe56baa8a440d7c8c9d95c2a90eb2a5e0d2e6bed0bdb948854f1d536b1997"} Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.390012 4721 generic.go:334] "Generic (PLEG): container finished" podID="b3e608c6-6350-4402-a53b-e0e5c55ae5b8" containerID="2d4f8defd27885510400e99948248ff9ce180dca6370a421238fd9f4460d783a" exitCode=0 Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.390097 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8f4-account-create-update-gthdn" event={"ID":"b3e608c6-6350-4402-a53b-e0e5c55ae5b8","Type":"ContainerDied","Data":"2d4f8defd27885510400e99948248ff9ce180dca6370a421238fd9f4460d783a"} Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.393263 4721 generic.go:334] "Generic (PLEG): container finished" podID="442a6735-5080-4fb6-89c0-57bcd08015a6" containerID="d8bb6d6ee3784ff7ea78566cbd1e939b94cc17105ffd71b68bed607eb538aff1" exitCode=0 Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.393366 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2796-account-create-update-jbzxh" event={"ID":"442a6735-5080-4fb6-89c0-57bcd08015a6","Type":"ContainerDied","Data":"d8bb6d6ee3784ff7ea78566cbd1e939b94cc17105ffd71b68bed607eb538aff1"} Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.395884 4721 generic.go:334] "Generic (PLEG): container finished" podID="2badf082-9873-424a-976c-4b9fde4bf13a" containerID="c3191038524903b8c891f7b94fd86beb1ae608fab0f65ad79bdaa9aa73456193" exitCode=0 Jan 30 21:47:11 crc kubenswrapper[4721]: I0130 21:47:11.395934 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f0bb-account-create-update-k7wz8" event={"ID":"2badf082-9873-424a-976c-4b9fde4bf13a","Type":"ContainerDied","Data":"c3191038524903b8c891f7b94fd86beb1ae608fab0f65ad79bdaa9aa73456193"} Jan 30 21:47:12 crc kubenswrapper[4721]: E0130 21:47:12.627616 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-keystone:current-podified" Jan 30 21:47:12 crc kubenswrapper[4721]: E0130 21:47:12.628191 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage 
Jan 30 21:47:12 crc kubenswrapper[4721]: E0130 21:47:12.628191 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tx8hd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-ffrrr_openstack(befb6e6d-91a5-46af-9c9d-59688cfbb6ec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 30 21:47:12 crc kubenswrapper[4721]: E0130 21:47:12.629323 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/keystone-db-sync-ffrrr" podUID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec"
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.901758 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.950425 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2796-account-create-update-jbzxh"
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.953470 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f0bb-account-create-update-k7wz8"
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.975253 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23a03053-c813-4fd0-b38b-f30f2e40a0cf-operator-scripts\") pod \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") "
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.975637 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/442a6735-5080-4fb6-89c0-57bcd08015a6-operator-scripts\") pod \"442a6735-5080-4fb6-89c0-57bcd08015a6\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") "
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.975784 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts\") pod \"2badf082-9873-424a-976c-4b9fde4bf13a\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") "
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.975973 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmmcd\" (UniqueName: \"kubernetes.io/projected/23a03053-c813-4fd0-b38b-f30f2e40a0cf-kube-api-access-mmmcd\") pod \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\" (UID: \"23a03053-c813-4fd0-b38b-f30f2e40a0cf\") "
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.976126 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stltp\" (UniqueName: \"kubernetes.io/projected/2badf082-9873-424a-976c-4b9fde4bf13a-kube-api-access-stltp\") pod \"2badf082-9873-424a-976c-4b9fde4bf13a\" (UID: \"2badf082-9873-424a-976c-4b9fde4bf13a\") "
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.976358 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv8m2\" (UniqueName: \"kubernetes.io/projected/442a6735-5080-4fb6-89c0-57bcd08015a6-kube-api-access-dv8m2\") pod \"442a6735-5080-4fb6-89c0-57bcd08015a6\" (UID: \"442a6735-5080-4fb6-89c0-57bcd08015a6\") "
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.975990 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23a03053-c813-4fd0-b38b-f30f2e40a0cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "23a03053-c813-4fd0-b38b-f30f2e40a0cf" (UID: "23a03053-c813-4fd0-b38b-f30f2e40a0cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.976443 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/442a6735-5080-4fb6-89c0-57bcd08015a6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "442a6735-5080-4fb6-89c0-57bcd08015a6" (UID: "442a6735-5080-4fb6-89c0-57bcd08015a6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.980605 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2badf082-9873-424a-976c-4b9fde4bf13a" (UID: "2badf082-9873-424a-976c-4b9fde4bf13a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.980969 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8f4-account-create-update-gthdn"
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.986974 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23a03053-c813-4fd0-b38b-f30f2e40a0cf-kube-api-access-mmmcd" (OuterVolumeSpecName: "kube-api-access-mmmcd") pod "23a03053-c813-4fd0-b38b-f30f2e40a0cf" (UID: "23a03053-c813-4fd0-b38b-f30f2e40a0cf"). InnerVolumeSpecName "kube-api-access-mmmcd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.987204 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2badf082-9873-424a-976c-4b9fde4bf13a-kube-api-access-stltp" (OuterVolumeSpecName: "kube-api-access-stltp") pod "2badf082-9873-424a-976c-4b9fde4bf13a" (UID: "2badf082-9873-424a-976c-4b9fde4bf13a"). InnerVolumeSpecName "kube-api-access-stltp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:47:12 crc kubenswrapper[4721]: I0130 21:47:12.988779 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/442a6735-5080-4fb6-89c0-57bcd08015a6-kube-api-access-dv8m2" (OuterVolumeSpecName: "kube-api-access-dv8m2") pod "442a6735-5080-4fb6-89c0-57bcd08015a6" (UID: "442a6735-5080-4fb6-89c0-57bcd08015a6"). InnerVolumeSpecName "kube-api-access-dv8m2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.079092 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-operator-scripts\") pod \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") "
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.079651 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz8kb\" (UniqueName: \"kubernetes.io/projected/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-kube-api-access-wz8kb\") pod \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\" (UID: \"b3e608c6-6350-4402-a53b-e0e5c55ae5b8\") "
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.079767 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b3e608c6-6350-4402-a53b-e0e5c55ae5b8" (UID: "b3e608c6-6350-4402-a53b-e0e5c55ae5b8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080130 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23a03053-c813-4fd0-b38b-f30f2e40a0cf-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080154 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/442a6735-5080-4fb6-89c0-57bcd08015a6-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080168 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2badf082-9873-424a-976c-4b9fde4bf13a-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080180 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmmcd\" (UniqueName: \"kubernetes.io/projected/23a03053-c813-4fd0-b38b-f30f2e40a0cf-kube-api-access-mmmcd\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080194 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stltp\" (UniqueName: \"kubernetes.io/projected/2badf082-9873-424a-976c-4b9fde4bf13a-kube-api-access-stltp\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080205 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.080217 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv8m2\" (UniqueName: \"kubernetes.io/projected/442a6735-5080-4fb6-89c0-57bcd08015a6-kube-api-access-dv8m2\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.083507 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-kube-api-access-wz8kb" (OuterVolumeSpecName: "kube-api-access-wz8kb") pod "b3e608c6-6350-4402-a53b-e0e5c55ae5b8" (UID: "b3e608c6-6350-4402-a53b-e0e5c55ae5b8"). InnerVolumeSpecName "kube-api-access-wz8kb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.182351 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz8kb\" (UniqueName: \"kubernetes.io/projected/b3e608c6-6350-4402-a53b-e0e5c55ae5b8-kube-api-access-wz8kb\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.422091 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f0bb-account-create-update-k7wz8" event={"ID":"2badf082-9873-424a-976c-4b9fde4bf13a","Type":"ContainerDied","Data":"860060b9444f40156dc6f8d078d60a876101a990d612971165b34e682c8316a3"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.422129 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="860060b9444f40156dc6f8d078d60a876101a990d612971165b34e682c8316a3"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.422527 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f0bb-account-create-update-k7wz8"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.454501 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4af6-account-create-update-tkzks" event={"ID":"23a03053-c813-4fd0-b38b-f30f2e40a0cf","Type":"ContainerDied","Data":"9bffaadcc04dcd0a2d8d8c562d1765a6593bfc5b9c9bd59529c6ac130b5f38ca"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.454550 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bffaadcc04dcd0a2d8d8c562d1765a6593bfc5b9c9bd59529c6ac130b5f38ca"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.454638 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4af6-account-create-update-tkzks"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.472741 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"42159633-a347-4843-9639-6e346cee733e","Type":"ContainerStarted","Data":"b2f3df4fbba8f33f4ce232428e6b0a520a1672a854157d2953d053e0ac69ab17"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.504631 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"70619f5fd806431aba5ec4370d88038db34d7a1501261a7f802b030395684745"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.504683 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"83ad5fd08fba5af2340857939e653eba2491f9453fe83db039c770084e9a0cf9"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.504698 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"b9a70e9900ebc72bea0b4c33c746047495b0ec4663c2a5b0e0e26989a588bc4d"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.511547 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8f4-account-create-update-gthdn" event={"ID":"b3e608c6-6350-4402-a53b-e0e5c55ae5b8","Type":"ContainerDied","Data":"0e7a7e116e4ca1277b540860dc36e977d6526a444eee83025f26b69dc36e8c19"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.511595 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e7a7e116e4ca1277b540860dc36e977d6526a444eee83025f26b69dc36e8c19"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.511689 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8f4-account-create-update-gthdn"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.516296 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=45.516277807 podStartE2EDuration="45.516277807s" podCreationTimestamp="2026-01-30 21:46:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:13.511642893 +0000 UTC m=+1822.303544139" watchObservedRunningTime="2026-01-30 21:47:13.516277807 +0000 UTC m=+1822.308179053"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.524789 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"6c7f0e72b263faf1f8c74cf000b0aea1b54cadc81c92f3cce81eb40376057c48"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.536865 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2796-account-create-update-jbzxh"
Jan 30 21:47:13 crc kubenswrapper[4721]: E0130 21:47:13.538280 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-keystone:current-podified\\\"\"" pod="openstack/keystone-db-sync-ffrrr" podUID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec"
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.536931 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2796-account-create-update-jbzxh" event={"ID":"442a6735-5080-4fb6-89c0-57bcd08015a6","Type":"ContainerDied","Data":"6f9759f17bc6a44782a0f8d257625e94a25b76a98d76171abd03eb583c30cc2f"}
Jan 30 21:47:13 crc kubenswrapper[4721]: I0130 21:47:13.540586 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f9759f17bc6a44782a0f8d257625e94a25b76a98d76171abd03eb583c30cc2f"
Jan 30 21:47:14 crc kubenswrapper[4721]: I0130 21:47:14.553459 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"abffe9a812049051957a581347f8dbc65d5d2f44f608e7cb6c4657d6141b0357"}
Jan 30 21:47:14 crc kubenswrapper[4721]: I0130 21:47:14.557156 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l2tjn" event={"ID":"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c","Type":"ContainerStarted","Data":"7bbdc18fbee129f6a1f262ea43189deae8924c2717fd8a9e14cdcadf90854e4e"}
Jan 30 21:47:14 crc kubenswrapper[4721]: I0130 21:47:14.593135 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-l2tjn" podStartSLOduration=3.530429854 podStartE2EDuration="1m4.59310181s" podCreationTimestamp="2026-01-30 21:46:10 +0000 UTC" firstStartedPulling="2026-01-30 21:46:11.57250542 +0000 UTC m=+1760.364406666" lastFinishedPulling="2026-01-30 21:47:12.635177376 +0000 UTC m=+1821.427078622" observedRunningTime="2026-01-30 21:47:14.572529831 +0000 UTC m=+1823.364431127" watchObservedRunningTime="2026-01-30 21:47:14.59310181 +0000 UTC m=+1823.385003096"
event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"3a247473d2babf226ad99195ed2c46d36b1d986c4fe4c4ad43b8f0dbc07df5ba"} Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.572686 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fbc194de-ea06-4d56-a35a-4b63a46651df","Type":"ContainerStarted","Data":"1ef2f4768ba6d042b389421dff26d766bffea297eb0e4e2711d7fe2d2dfcb89a"} Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.628082 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=78.587381008 podStartE2EDuration="1m34.628063603s" podCreationTimestamp="2026-01-30 21:45:41 +0000 UTC" firstStartedPulling="2026-01-30 21:46:27.11065121 +0000 UTC m=+1775.902552446" lastFinishedPulling="2026-01-30 21:46:43.151333795 +0000 UTC m=+1791.943235041" observedRunningTime="2026-01-30 21:47:15.614957577 +0000 UTC m=+1824.406858873" watchObservedRunningTime="2026-01-30 21:47:15.628063603 +0000 UTC m=+1824.419964849" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.925605 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-94m4j"] Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926052 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c66a91-2e9d-4de3-b97c-726ef7ff501d" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926074 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c66a91-2e9d-4de3-b97c-726ef7ff501d" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926091 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf51132-1e9b-4b5e-bd24-c7290cebd23c" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926099 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf51132-1e9b-4b5e-bd24-c7290cebd23c" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926111 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8583562e-347f-4aed-9977-0b02f27f3e4f" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926120 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8583562e-347f-4aed-9977-0b02f27f3e4f" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926143 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23a03053-c813-4fd0-b38b-f30f2e40a0cf" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926151 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="23a03053-c813-4fd0-b38b-f30f2e40a0cf" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926165 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4553135b-6050-4c65-8f3f-e20a998bb7b0" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926172 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4553135b-6050-4c65-8f3f-e20a998bb7b0" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926186 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="442a6735-5080-4fb6-89c0-57bcd08015a6" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc 
kubenswrapper[4721]: I0130 21:47:15.926193 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="442a6735-5080-4fb6-89c0-57bcd08015a6" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926205 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3e608c6-6350-4402-a53b-e0e5c55ae5b8" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926212 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3e608c6-6350-4402-a53b-e0e5c55ae5b8" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926232 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e592b8-6959-4d00-94fa-fcab154f8615" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926241 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e592b8-6959-4d00-94fa-fcab154f8615" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: E0130 21:47:15.926256 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2badf082-9873-424a-976c-4b9fde4bf13a" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926264 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2badf082-9873-424a-976c-4b9fde4bf13a" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926500 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e592b8-6959-4d00-94fa-fcab154f8615" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926522 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="69c66a91-2e9d-4de3-b97c-726ef7ff501d" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926531 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf51132-1e9b-4b5e-bd24-c7290cebd23c" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926541 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="442a6735-5080-4fb6-89c0-57bcd08015a6" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926560 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3e608c6-6350-4402-a53b-e0e5c55ae5b8" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926577 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2badf082-9873-424a-976c-4b9fde4bf13a" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926595 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="23a03053-c813-4fd0-b38b-f30f2e40a0cf" containerName="mariadb-account-create-update" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926608 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4553135b-6050-4c65-8f3f-e20a998bb7b0" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.926623 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8583562e-347f-4aed-9977-0b02f27f3e4f" containerName="mariadb-database-create" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.947186 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.954659 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 30 21:47:15 crc kubenswrapper[4721]: I0130 21:47:15.966242 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-94m4j"] Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.053416 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-svc\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.053494 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-config\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.053522 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.053568 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.053666 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8m6j\" (UniqueName: \"kubernetes.io/projected/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-kube-api-access-w8m6j\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.053700 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.156019 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-config\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.156147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: 
\"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.156239 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.156408 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8m6j\" (UniqueName: \"kubernetes.io/projected/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-kube-api-access-w8m6j\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.156931 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-config\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.157010 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.157113 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-svc\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.157169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.157449 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.157615 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.158461 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-svc\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 
21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.187694 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8m6j\" (UniqueName: \"kubernetes.io/projected/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-kube-api-access-w8m6j\") pod \"dnsmasq-dns-764c5664d7-94m4j\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.297316 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:16 crc kubenswrapper[4721]: I0130 21:47:16.780189 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-94m4j"] Jan 30 21:47:16 crc kubenswrapper[4721]: W0130 21:47:16.784733 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61f1d80c_1b3c_47dd_ad81_ecb373e334ca.slice/crio-7e8872c9ee373fc8bd67e100856ab1ad660e7b86677d541ce35b2f3975dc95a7 WatchSource:0}: Error finding container 7e8872c9ee373fc8bd67e100856ab1ad660e7b86677d541ce35b2f3975dc95a7: Status 404 returned error can't find the container with id 7e8872c9ee373fc8bd67e100856ab1ad660e7b86677d541ce35b2f3975dc95a7 Jan 30 21:47:17 crc kubenswrapper[4721]: I0130 21:47:17.597963 4721 generic.go:334] "Generic (PLEG): container finished" podID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerID="169c9790b7d1fed25268c931f06c0008853c008639d1085c09c26d8b2d33cd4c" exitCode=0 Jan 30 21:47:17 crc kubenswrapper[4721]: I0130 21:47:17.598335 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" event={"ID":"61f1d80c-1b3c-47dd-ad81-ecb373e334ca","Type":"ContainerDied","Data":"169c9790b7d1fed25268c931f06c0008853c008639d1085c09c26d8b2d33cd4c"} Jan 30 21:47:17 crc kubenswrapper[4721]: I0130 21:47:17.598359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" event={"ID":"61f1d80c-1b3c-47dd-ad81-ecb373e334ca","Type":"ContainerStarted","Data":"7e8872c9ee373fc8bd67e100856ab1ad660e7b86677d541ce35b2f3975dc95a7"} Jan 30 21:47:18 crc kubenswrapper[4721]: I0130 21:47:18.464537 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 30 21:47:18 crc kubenswrapper[4721]: I0130 21:47:18.621984 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" event={"ID":"61f1d80c-1b3c-47dd-ad81-ecb373e334ca","Type":"ContainerStarted","Data":"ffcb99b059110848fe470768812bd22656ed7d6b17de07a5439602803b18f445"} Jan 30 21:47:18 crc kubenswrapper[4721]: I0130 21:47:18.622197 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:18 crc kubenswrapper[4721]: I0130 21:47:18.638736 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" podStartSLOduration=3.638720261 podStartE2EDuration="3.638720261s" podCreationTimestamp="2026-01-30 21:47:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:18.637786333 +0000 UTC m=+1827.429687589" watchObservedRunningTime="2026-01-30 21:47:18.638720261 +0000 UTC m=+1827.430621497" Jan 30 21:47:21 crc kubenswrapper[4721]: I0130 21:47:21.650018 4721 generic.go:334] "Generic (PLEG): container finished" 
podID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" containerID="7bbdc18fbee129f6a1f262ea43189deae8924c2717fd8a9e14cdcadf90854e4e" exitCode=0 Jan 30 21:47:21 crc kubenswrapper[4721]: I0130 21:47:21.650133 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l2tjn" event={"ID":"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c","Type":"ContainerDied","Data":"7bbdc18fbee129f6a1f262ea43189deae8924c2717fd8a9e14cdcadf90854e4e"} Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.234101 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-l2tjn" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.392366 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-combined-ca-bundle\") pod \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.392519 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-config-data\") pod \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.392582 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-db-sync-config-data\") pod \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.392667 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg22j\" (UniqueName: \"kubernetes.io/projected/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-kube-api-access-kg22j\") pod \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\" (UID: \"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c\") " Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.398776 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-kube-api-access-kg22j" (OuterVolumeSpecName: "kube-api-access-kg22j") pod "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" (UID: "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c"). InnerVolumeSpecName "kube-api-access-kg22j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.399594 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" (UID: "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.419042 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" (UID: "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.444375 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-config-data" (OuterVolumeSpecName: "config-data") pod "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" (UID: "7c432809-1bbc-46aa-b2bb-4cc7fd182b5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.495196 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg22j\" (UniqueName: \"kubernetes.io/projected/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-kube-api-access-kg22j\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.495236 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.495246 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.495255 4721 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.669935 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l2tjn" event={"ID":"7c432809-1bbc-46aa-b2bb-4cc7fd182b5c","Type":"ContainerDied","Data":"f9b4d9ec8dcc6803f5aa3c5abbe7431aa96dc5fa8f3fe3bb8d633ac04cb0ea1b"} Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.669988 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9b4d9ec8dcc6803f5aa3c5abbe7431aa96dc5fa8f3fe3bb8d633ac04cb0ea1b" Jan 30 21:47:23 crc kubenswrapper[4721]: I0130 21:47:23.670063 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-l2tjn" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.067474 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-94m4j"] Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.068052 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerName="dnsmasq-dns" containerID="cri-o://ffcb99b059110848fe470768812bd22656ed7d6b17de07a5439602803b18f445" gracePeriod=10 Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.070971 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.121097 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jpkbv"] Jan 30 21:47:24 crc kubenswrapper[4721]: E0130 21:47:24.121707 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" containerName="glance-db-sync" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.121733 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" containerName="glance-db-sync" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.121996 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" containerName="glance-db-sync" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.123242 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.135693 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jpkbv"] Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.208662 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-config\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.209152 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.209245 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.209420 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.209594 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.209692 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w29st\" (UniqueName: \"kubernetes.io/projected/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-kube-api-access-w29st\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.311451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.311577 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.311622 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w29st\" (UniqueName: \"kubernetes.io/projected/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-kube-api-access-w29st\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.311665 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-config\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.311740 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.311778 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.312492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.312600 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.313381 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-config\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.313560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.313994 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.336157 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w29st\" (UniqueName: \"kubernetes.io/projected/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-kube-api-access-w29st\") pod \"dnsmasq-dns-74f6bcbc87-jpkbv\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.447791 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.684643 4721 generic.go:334] "Generic (PLEG): container finished" podID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerID="ffcb99b059110848fe470768812bd22656ed7d6b17de07a5439602803b18f445" exitCode=0 Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.684704 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" event={"ID":"61f1d80c-1b3c-47dd-ad81-ecb373e334ca","Type":"ContainerDied","Data":"ffcb99b059110848fe470768812bd22656ed7d6b17de07a5439602803b18f445"} Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.684736 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" event={"ID":"61f1d80c-1b3c-47dd-ad81-ecb373e334ca","Type":"ContainerDied","Data":"7e8872c9ee373fc8bd67e100856ab1ad660e7b86677d541ce35b2f3975dc95a7"} Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.684749 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e8872c9ee373fc8bd67e100856ab1ad660e7b86677d541ce35b2f3975dc95a7" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.706449 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.821110 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-nb\") pod \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.821322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-sb\") pod \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.821422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8m6j\" (UniqueName: \"kubernetes.io/projected/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-kube-api-access-w8m6j\") pod \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.821472 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-config\") pod \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.821564 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-svc\") pod \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.821589 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-swift-storage-0\") pod \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\" (UID: \"61f1d80c-1b3c-47dd-ad81-ecb373e334ca\") " Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.829542 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-kube-api-access-w8m6j" (OuterVolumeSpecName: "kube-api-access-w8m6j") pod "61f1d80c-1b3c-47dd-ad81-ecb373e334ca" (UID: "61f1d80c-1b3c-47dd-ad81-ecb373e334ca"). InnerVolumeSpecName "kube-api-access-w8m6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.876559 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "61f1d80c-1b3c-47dd-ad81-ecb373e334ca" (UID: "61f1d80c-1b3c-47dd-ad81-ecb373e334ca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.877938 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "61f1d80c-1b3c-47dd-ad81-ecb373e334ca" (UID: "61f1d80c-1b3c-47dd-ad81-ecb373e334ca"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.890813 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-config" (OuterVolumeSpecName: "config") pod "61f1d80c-1b3c-47dd-ad81-ecb373e334ca" (UID: "61f1d80c-1b3c-47dd-ad81-ecb373e334ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.891646 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "61f1d80c-1b3c-47dd-ad81-ecb373e334ca" (UID: "61f1d80c-1b3c-47dd-ad81-ecb373e334ca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.894831 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "61f1d80c-1b3c-47dd-ad81-ecb373e334ca" (UID: "61f1d80c-1b3c-47dd-ad81-ecb373e334ca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.918796 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jpkbv"] Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.924594 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.924627 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8m6j\" (UniqueName: \"kubernetes.io/projected/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-kube-api-access-w8m6j\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.924645 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.924657 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.924668 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:24 crc kubenswrapper[4721]: I0130 21:47:24.924682 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/61f1d80c-1b3c-47dd-ad81-ecb373e334ca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:25 crc kubenswrapper[4721]: I0130 21:47:25.694470 4721 generic.go:334] "Generic (PLEG): container finished" podID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerID="74f549dc3ee116341bb80f53f6961d1487184a589ad71424239bc7352644d9a2" exitCode=0 Jan 30 21:47:25 crc kubenswrapper[4721]: I0130 21:47:25.694514 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" 
event={"ID":"9331d226-1386-4cbb-9e87-ba2c41ac2a1b","Type":"ContainerDied","Data":"74f549dc3ee116341bb80f53f6961d1487184a589ad71424239bc7352644d9a2"} Jan 30 21:47:25 crc kubenswrapper[4721]: I0130 21:47:25.694830 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-94m4j" Jan 30 21:47:25 crc kubenswrapper[4721]: I0130 21:47:25.694837 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" event={"ID":"9331d226-1386-4cbb-9e87-ba2c41ac2a1b","Type":"ContainerStarted","Data":"0cd921d4744c3c5acfedb1914924ab3dd717d16ee2050e516881dd022bb683fc"} Jan 30 21:47:25 crc kubenswrapper[4721]: I0130 21:47:25.892659 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-94m4j"] Jan 30 21:47:25 crc kubenswrapper[4721]: I0130 21:47:25.902828 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-94m4j"] Jan 30 21:47:26 crc kubenswrapper[4721]: I0130 21:47:26.105389 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" path="/var/lib/kubelet/pods/61f1d80c-1b3c-47dd-ad81-ecb373e334ca/volumes" Jan 30 21:47:26 crc kubenswrapper[4721]: I0130 21:47:26.703733 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" event={"ID":"9331d226-1386-4cbb-9e87-ba2c41ac2a1b","Type":"ContainerStarted","Data":"0e06051a923deceb6b1c43d8e0d056a958516e7c093b7447ef4d3de26f5673c4"} Jan 30 21:47:26 crc kubenswrapper[4721]: I0130 21:47:26.704809 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:26 crc kubenswrapper[4721]: I0130 21:47:26.728003 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podStartSLOduration=2.7279782949999998 podStartE2EDuration="2.727978295s" podCreationTimestamp="2026-01-30 21:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:26.71975532 +0000 UTC m=+1835.511656586" watchObservedRunningTime="2026-01-30 21:47:26.727978295 +0000 UTC m=+1835.519879541" Jan 30 21:47:28 crc kubenswrapper[4721]: I0130 21:47:28.464687 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 30 21:47:28 crc kubenswrapper[4721]: I0130 21:47:28.470713 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 30 21:47:28 crc kubenswrapper[4721]: I0130 21:47:28.725776 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 30 21:47:29 crc kubenswrapper[4721]: I0130 21:47:29.728027 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ffrrr" event={"ID":"befb6e6d-91a5-46af-9c9d-59688cfbb6ec","Type":"ContainerStarted","Data":"5dd298f39c14e1ee346a82eb396a55b2437f9d2dd2835eba61d04d5c984168ab"} Jan 30 21:47:29 crc kubenswrapper[4721]: I0130 21:47:29.747681 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-ffrrr" podStartSLOduration=2.771646189 podStartE2EDuration="52.747662404s" podCreationTimestamp="2026-01-30 21:46:37 +0000 UTC" firstStartedPulling="2026-01-30 21:46:39.149333576 +0000 UTC m=+1787.941234822" 
lastFinishedPulling="2026-01-30 21:47:29.125349791 +0000 UTC m=+1837.917251037" observedRunningTime="2026-01-30 21:47:29.74299024 +0000 UTC m=+1838.534891486" watchObservedRunningTime="2026-01-30 21:47:29.747662404 +0000 UTC m=+1838.539563650" Jan 30 21:47:34 crc kubenswrapper[4721]: I0130 21:47:34.450488 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:47:34 crc kubenswrapper[4721]: I0130 21:47:34.523200 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-msqq2"] Jan 30 21:47:34 crc kubenswrapper[4721]: I0130 21:47:34.523636 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-msqq2" podUID="d9fe7811-2c66-433e-9173-a670957604bc" containerName="dnsmasq-dns" containerID="cri-o://2d7d34b57b2e76e29321e225d0442c7d40d4a8bd2e4d1d8d986b6e36f79e50c6" gracePeriod=10 Jan 30 21:47:34 crc kubenswrapper[4721]: I0130 21:47:34.794529 4721 generic.go:334] "Generic (PLEG): container finished" podID="d9fe7811-2c66-433e-9173-a670957604bc" containerID="2d7d34b57b2e76e29321e225d0442c7d40d4a8bd2e4d1d8d986b6e36f79e50c6" exitCode=0 Jan 30 21:47:34 crc kubenswrapper[4721]: I0130 21:47:34.794582 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-msqq2" event={"ID":"d9fe7811-2c66-433e-9173-a670957604bc","Type":"ContainerDied","Data":"2d7d34b57b2e76e29321e225d0442c7d40d4a8bd2e4d1d8d986b6e36f79e50c6"} Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.122463 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.249784 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-475qv\" (UniqueName: \"kubernetes.io/projected/d9fe7811-2c66-433e-9173-a670957604bc-kube-api-access-475qv\") pod \"d9fe7811-2c66-433e-9173-a670957604bc\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.249921 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-sb\") pod \"d9fe7811-2c66-433e-9173-a670957604bc\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.249994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-config\") pod \"d9fe7811-2c66-433e-9173-a670957604bc\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.250040 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-nb\") pod \"d9fe7811-2c66-433e-9173-a670957604bc\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.250162 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-dns-svc\") pod \"d9fe7811-2c66-433e-9173-a670957604bc\" (UID: \"d9fe7811-2c66-433e-9173-a670957604bc\") " Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.257561 4721 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9fe7811-2c66-433e-9173-a670957604bc-kube-api-access-475qv" (OuterVolumeSpecName: "kube-api-access-475qv") pod "d9fe7811-2c66-433e-9173-a670957604bc" (UID: "d9fe7811-2c66-433e-9173-a670957604bc"). InnerVolumeSpecName "kube-api-access-475qv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.296789 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d9fe7811-2c66-433e-9173-a670957604bc" (UID: "d9fe7811-2c66-433e-9173-a670957604bc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.300822 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-config" (OuterVolumeSpecName: "config") pod "d9fe7811-2c66-433e-9173-a670957604bc" (UID: "d9fe7811-2c66-433e-9173-a670957604bc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.304727 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d9fe7811-2c66-433e-9173-a670957604bc" (UID: "d9fe7811-2c66-433e-9173-a670957604bc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.316339 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d9fe7811-2c66-433e-9173-a670957604bc" (UID: "d9fe7811-2c66-433e-9173-a670957604bc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.355206 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.355540 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-475qv\" (UniqueName: \"kubernetes.io/projected/d9fe7811-2c66-433e-9173-a670957604bc-kube-api-access-475qv\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.355646 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.355700 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.355795 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9fe7811-2c66-433e-9173-a670957604bc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.806518 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-msqq2" event={"ID":"d9fe7811-2c66-433e-9173-a670957604bc","Type":"ContainerDied","Data":"0967ed73e0df7213d9d02726d55c62876ba094a2d77367e276c04bcf49acc5bb"} Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.806571 4721 scope.go:117] "RemoveContainer" containerID="2d7d34b57b2e76e29321e225d0442c7d40d4a8bd2e4d1d8d986b6e36f79e50c6" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.806692 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-msqq2" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.830355 4721 scope.go:117] "RemoveContainer" containerID="50b2e3fdcd8aae947dc91d4ba748ece22d316a74431b6d5f60d179b765ea4d3b" Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.857808 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-msqq2"] Jan 30 21:47:35 crc kubenswrapper[4721]: I0130 21:47:35.871138 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-msqq2"] Jan 30 21:47:36 crc kubenswrapper[4721]: I0130 21:47:36.104478 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9fe7811-2c66-433e-9173-a670957604bc" path="/var/lib/kubelet/pods/d9fe7811-2c66-433e-9173-a670957604bc/volumes" Jan 30 21:47:36 crc kubenswrapper[4721]: I0130 21:47:36.818987 4721 generic.go:334] "Generic (PLEG): container finished" podID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec" containerID="5dd298f39c14e1ee346a82eb396a55b2437f9d2dd2835eba61d04d5c984168ab" exitCode=0 Jan 30 21:47:36 crc kubenswrapper[4721]: I0130 21:47:36.819105 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ffrrr" event={"ID":"befb6e6d-91a5-46af-9c9d-59688cfbb6ec","Type":"ContainerDied","Data":"5dd298f39c14e1ee346a82eb396a55b2437f9d2dd2835eba61d04d5c984168ab"} Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.238431 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ffrrr" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.418145 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-combined-ca-bundle\") pod \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.418813 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx8hd\" (UniqueName: \"kubernetes.io/projected/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-kube-api-access-tx8hd\") pod \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.419033 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-config-data\") pod \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\" (UID: \"befb6e6d-91a5-46af-9c9d-59688cfbb6ec\") " Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.424593 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-kube-api-access-tx8hd" (OuterVolumeSpecName: "kube-api-access-tx8hd") pod "befb6e6d-91a5-46af-9c9d-59688cfbb6ec" (UID: "befb6e6d-91a5-46af-9c9d-59688cfbb6ec"). InnerVolumeSpecName "kube-api-access-tx8hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.447154 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "befb6e6d-91a5-46af-9c9d-59688cfbb6ec" (UID: "befb6e6d-91a5-46af-9c9d-59688cfbb6ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.465306 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-config-data" (OuterVolumeSpecName: "config-data") pod "befb6e6d-91a5-46af-9c9d-59688cfbb6ec" (UID: "befb6e6d-91a5-46af-9c9d-59688cfbb6ec"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.521537 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx8hd\" (UniqueName: \"kubernetes.io/projected/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-kube-api-access-tx8hd\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.521630 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.521648 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befb6e6d-91a5-46af-9c9d-59688cfbb6ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.837861 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ffrrr" event={"ID":"befb6e6d-91a5-46af-9c9d-59688cfbb6ec","Type":"ContainerDied","Data":"a8531ac8d26ac46ee52b885e115381853a22626bdbf9c128e02bf05059ecaa67"} Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.837907 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8531ac8d26ac46ee52b885e115381853a22626bdbf9c128e02bf05059ecaa67" Jan 30 21:47:38 crc kubenswrapper[4721]: I0130 21:47:38.837923 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ffrrr" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.120625 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-9tdw7"] Jan 30 21:47:39 crc kubenswrapper[4721]: E0130 21:47:39.127596 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fe7811-2c66-433e-9173-a670957604bc" containerName="dnsmasq-dns" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.127655 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fe7811-2c66-433e-9173-a670957604bc" containerName="dnsmasq-dns" Jan 30 21:47:39 crc kubenswrapper[4721]: E0130 21:47:39.127735 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerName="dnsmasq-dns" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.127747 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerName="dnsmasq-dns" Jan 30 21:47:39 crc kubenswrapper[4721]: E0130 21:47:39.127779 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerName="init" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.127790 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerName="init" Jan 30 21:47:39 crc kubenswrapper[4721]: E0130 21:47:39.127819 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fe7811-2c66-433e-9173-a670957604bc" containerName="init" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.128021 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fe7811-2c66-433e-9173-a670957604bc" containerName="init" Jan 30 21:47:39 crc kubenswrapper[4721]: E0130 21:47:39.128066 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec" containerName="keystone-db-sync" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.128091 4721 
state_mem.go:107] "Deleted CPUSet assignment" podUID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec" containerName="keystone-db-sync" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.132573 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fe7811-2c66-433e-9173-a670957604bc" containerName="dnsmasq-dns" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.132716 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="61f1d80c-1b3c-47dd-ad81-ecb373e334ca" containerName="dnsmasq-dns" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.132737 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec" containerName="keystone-db-sync" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.147506 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.208009 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-9tdw7"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.239420 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-nfc55"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.241141 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.246104 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.246254 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.246570 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.246682 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.246687 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qfzgk" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.255366 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-config\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.255455 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.255505 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.255563 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-svc\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.255631 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.255674 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmfks\" (UniqueName: \"kubernetes.io/projected/82787ece-6910-4b49-a66f-fa2ea94c77b8-kube-api-access-pmfks\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.258778 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nfc55"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371302 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-scripts\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371389 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371461 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-combined-ca-bundle\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371488 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7lhw\" (UniqueName: \"kubernetes.io/projected/65808c08-fb8d-4039-a516-700ff573ca09-kube-api-access-x7lhw\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371508 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmfks\" (UniqueName: \"kubernetes.io/projected/82787ece-6910-4b49-a66f-fa2ea94c77b8-kube-api-access-pmfks\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371536 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-config\") pod 
\"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371586 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371636 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371690 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-config-data\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-fernet-keys\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371742 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-credential-keys\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.371783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-svc\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.372571 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.373473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.374470 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-config\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: 
\"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.375271 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.379830 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-svc\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.398766 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-jbqqk"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.400455 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-jbqqk" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.406207 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-zj7p9" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.415376 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.415662 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.416259 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmfks\" (UniqueName: \"kubernetes.io/projected/82787ece-6910-4b49-a66f-fa2ea94c77b8-kube-api-access-pmfks\") pod \"dnsmasq-dns-847c4cc679-9tdw7\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.416941 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-k4phl"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.418540 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-k4phl" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.425105 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.425362 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-78jsp" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.425569 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.455347 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-jbqqk"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479154 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-k4phl"] Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479503 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-combined-ca-bundle\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7lhw\" (UniqueName: \"kubernetes.io/projected/65808c08-fb8d-4039-a516-700ff573ca09-kube-api-access-x7lhw\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479690 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-config-data\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-fernet-keys\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479747 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-credential-keys\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.479805 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-scripts\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.497057 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-combined-ca-bundle\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:47:39 crc 
kubenswrapper[4721]: I0130 21:47:39.497560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-scripts\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.505205 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-fernet-keys\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.507056 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-credential-keys\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.519198 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-config-data\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.534681 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-9tdw7"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.537962 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7lhw\" (UniqueName: \"kubernetes.io/projected/65808c08-fb8d-4039-a516-700ff573ca09-kube-api-access-x7lhw\") pod \"keystone-bootstrap-nfc55\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " pod="openstack/keystone-bootstrap-nfc55"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.572379 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-gr825"]
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.573978 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.581182 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2qsxv"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585573 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-db-sync-config-data\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585646 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-combined-ca-bundle\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v94d8\" (UniqueName: \"kubernetes.io/projected/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-kube-api-access-v94d8\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585741 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-etc-machine-id\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585772 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfjqc\" (UniqueName: \"kubernetes.io/projected/46c7155a-444a-42b9-9e5d-183998bc5d22-kube-api-access-lfjqc\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585811 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-config-data\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585834 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-combined-ca-bundle\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585883 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-config\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.585911 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-scripts\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.587722 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.603390 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nfc55"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.642726 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.646003 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.662248 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.663197 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689264 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-config\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689374 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-scripts\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689464 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-db-sync-config-data\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689576 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-combined-ca-bundle\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689674 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-db-sync-config-data\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689720 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nktdd\" (UniqueName: \"kubernetes.io/projected/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-kube-api-access-nktdd\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v94d8\" (UniqueName: \"kubernetes.io/projected/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-kube-api-access-v94d8\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.689812 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-etc-machine-id\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.691067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfjqc\" (UniqueName: \"kubernetes.io/projected/46c7155a-444a-42b9-9e5d-183998bc5d22-kube-api-access-lfjqc\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.691134 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-combined-ca-bundle\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.692205 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-config-data\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.692259 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-combined-ca-bundle\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.692352 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-etc-machine-id\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.704185 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-config\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.705399 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-gr825"]
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.709023 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-combined-ca-bundle\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.710067 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-scripts\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.715939 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-combined-ca-bundle\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.718118 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-config-data\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.727112 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-db-sync-config-data\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.728053 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v94d8\" (UniqueName: \"kubernetes.io/projected/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-kube-api-access-v94d8\") pod \"cinder-db-sync-k4phl\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.778673 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfjqc\" (UniqueName: \"kubernetes.io/projected/46c7155a-444a-42b9-9e5d-183998bc5d22-kube-api-access-lfjqc\") pod \"neutron-db-sync-jbqqk\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.785414 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797231 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-db-sync-config-data\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797287 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nktdd\" (UniqueName: \"kubernetes.io/projected/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-kube-api-access-nktdd\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797330 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-config-data\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797361 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797394 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-combined-ca-bundle\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797435 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797453 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797495 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9b82\" (UniqueName: \"kubernetes.io/projected/f6e7509b-d406-4a52-b5e9-6ba1589d2217-kube-api-access-v9b82\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.797577 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-scripts\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.801956 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-combined-ca-bundle\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.806980 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-db-sync-config-data\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.813501 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-k4phl"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.825360 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nktdd\" (UniqueName: \"kubernetes.io/projected/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-kube-api-access-nktdd\") pod \"barbican-db-sync-gr825\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") " pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.858176 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-gr825"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.896418 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-9tdw7"]
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.899660 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-config-data\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.899728 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.899845 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.899878 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.899932 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9b82\" (UniqueName: \"kubernetes.io/projected/f6e7509b-d406-4a52-b5e9-6ba1589d2217-kube-api-access-v9b82\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.900050 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.900114 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-scripts\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.903002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.904561 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.910405 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.911397 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-scripts\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.912033 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.917779 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-config-data\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.939861 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9b82\" (UniqueName: \"kubernetes.io/projected/f6e7509b-d406-4a52-b5e9-6ba1589d2217-kube-api-access-v9b82\") pod \"ceilometer-0\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " pod="openstack/ceilometer-0"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.983075 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jjnhr"]
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.984923 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.992481 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-h627x"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.992778 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.993404 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Jan 30 21:47:39 crc kubenswrapper[4721]: I0130 21:47:39.999669 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-5sg8c"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.006707 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.029833 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jjnhr"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.054356 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-5sg8c"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.068111 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-b98rm"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.070042 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.074264 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-sss92"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.075261 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.080375 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-b98rm"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.084171 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.084225 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.085919 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-jbqqk"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.106988 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107106 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgzdc\" (UniqueName: \"kubernetes.io/projected/4698d6a6-e501-4f42-b6d0-172334487746-kube-api-access-lgzdc\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107145 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107187 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107219 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41614e72-cf2f-43c9-a879-f4c76ff277d5-logs\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107246 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gkml\" (UniqueName: \"kubernetes.io/projected/41614e72-cf2f-43c9-a879-f4c76ff277d5-kube-api-access-4gkml\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107264 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-config\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107290 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-scripts\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107321 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-config-data\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107351 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-combined-ca-bundle\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.107378 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.192364 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.210635 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-certs\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.210946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxqlc\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-kube-api-access-gxqlc\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.210976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-config-data\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.210998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgzdc\" (UniqueName: \"kubernetes.io/projected/4698d6a6-e501-4f42-b6d0-172334487746-kube-api-access-lgzdc\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211037 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211070 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211097 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41614e72-cf2f-43c9-a879-f4c76ff277d5-logs\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211131 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-combined-ca-bundle\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211164 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gkml\" (UniqueName: \"kubernetes.io/projected/41614e72-cf2f-43c9-a879-f4c76ff277d5-kube-api-access-4gkml\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211185 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-config\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211220 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-scripts\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211240 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-config-data\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211270 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-combined-ca-bundle\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211291 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-scripts\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211329 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.211353 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.214010 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.215209 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.215492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41614e72-cf2f-43c9-a879-f4c76ff277d5-logs\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.216480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-config\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.220181 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.220818 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.224854 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-scripts\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.226634 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-combined-ca-bundle\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.238592 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-config-data\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.247342 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgzdc\" (UniqueName: \"kubernetes.io/projected/4698d6a6-e501-4f42-b6d0-172334487746-kube-api-access-lgzdc\") pod \"dnsmasq-dns-785d8bcb8c-5sg8c\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") " pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.279682 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gkml\" (UniqueName: \"kubernetes.io/projected/41614e72-cf2f-43c9-a879-f4c76ff277d5-kube-api-access-4gkml\") pod \"placement-db-sync-jjnhr\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.305911 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.317225 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-combined-ca-bundle\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.317381 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-scripts\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.317492 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-certs\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.317529 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxqlc\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-kube-api-access-gxqlc\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.317561 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-config-data\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.323086 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.332240 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jjnhr"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.351215 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.351567 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.353166 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.353634 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9k75v"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.356373 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.387147 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.399058 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxqlc\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-kube-api-access-gxqlc\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.442310 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-combined-ca-bundle\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.443766 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-certs\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.481262 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-config-data\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.493632 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-scripts\") pod \"cloudkitty-db-sync-b98rm\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504471 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkltv\" (UniqueName: \"kubernetes.io/projected/8609af93-d668-4e8a-9870-5c06becf486f-kube-api-access-tkltv\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504550 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504615 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-scripts\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504661 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-logs\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504710 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504864 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-config-data\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.504952 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.505022 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.554844 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.559783 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.563966 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.564622 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.577730 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-9tdw7"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.590622 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.608730 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609012 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609163 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkltv\" (UniqueName: \"kubernetes.io/projected/8609af93-d668-4e8a-9870-5c06becf486f-kube-api-access-tkltv\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609281 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609416 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-scripts\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609523 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-logs\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.609807 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-config-data\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.611121 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.616217 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-logs\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.616999 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.633780 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-scripts\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.634154 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-config-data\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.643616 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.643674 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7f3e3924f4a6ec4e0a2834994592735d1017c71a0fc9cab6b021afe197356bdd/globalmount\"" pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.648069 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.648568 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkltv\" (UniqueName: \"kubernetes.io/projected/8609af93-d668-4e8a-9870-5c06becf486f-kube-api-access-tkltv\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.694195 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-gr825"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.709113 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nfc55"]
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711599 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711700 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711744 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711788 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711819 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2ftw\" (UniqueName: \"kubernetes.io/projected/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-kube-api-access-t2ftw\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711919 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-logs\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.711990 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.712013 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.744976 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.762813 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-b98rm"
Jan 30 21:47:40 crc kubenswrapper[4721]: W0130 21:47:40.797664 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65808c08_fb8d_4039_a516_700ff573ca09.slice/crio-af64e2c7400f705e99829cbe26a1c73abf83982b49b4c7373f04cd3defc09e73 WatchSource:0}: Error finding container af64e2c7400f705e99829cbe26a1c73abf83982b49b4c7373f04cd3defc09e73: Status 404 returned error can't find the container with id af64e2c7400f705e99829cbe26a1c73abf83982b49b4c7373f04cd3defc09e73
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814280 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-logs\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814443 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814503 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814563 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814588 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814629 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814679 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.814708 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2ftw\" (UniqueName: \"kubernetes.io/projected/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-kube-api-access-t2ftw\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.815485 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-logs\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.815754 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.824848 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.824901 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a2a66a61f2645de3c0e9d3f208d5113b5666c55647cddcb1d91cf50b3d6010ba/globalmount\"" pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.830656 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.831345 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.831693 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.833618 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.843409 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2ftw\" (UniqueName: \"kubernetes.io/projected/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-kube-api-access-t2ftw\") pod
\"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.881086 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gr825" event={"ID":"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76","Type":"ContainerStarted","Data":"3f899a57976ffed630242501b2bb5a94bbd4cc330cc5701e799f00370611b478"} Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.881717 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-k4phl"] Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.885330 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.886756 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nfc55" event={"ID":"65808c08-fb8d-4039-a516-700ff573ca09","Type":"ContainerStarted","Data":"af64e2c7400f705e99829cbe26a1c73abf83982b49b4c7373f04cd3defc09e73"} Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.889781 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" event={"ID":"82787ece-6910-4b49-a66f-fa2ea94c77b8","Type":"ContainerStarted","Data":"e3f2e7d2a4142e887e7ad0dcb1a8bf0e5b6f9b9e5eeb2fb34b0337ed20c00378"} Jan 30 21:47:40 crc kubenswrapper[4721]: I0130 21:47:40.991439 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.098364 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-jbqqk"] Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.128894 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.490892 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jjnhr"] Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.535346 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.550275 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-5sg8c"] Jan 30 21:47:41 crc kubenswrapper[4721]: W0130 21:47:41.566754 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4698d6a6_e501_4f42_b6d0_172334487746.slice/crio-0c3442c574ff2c1de7f6a86c6487c4611945d27c20a0255d99d92b713d9bd2f3 WatchSource:0}: Error finding container 0c3442c574ff2c1de7f6a86c6487c4611945d27c20a0255d99d92b713d9bd2f3: Status 404 returned error can't find the container with id 0c3442c574ff2c1de7f6a86c6487c4611945d27c20a0255d99d92b713d9bd2f3 Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.591420 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-b98rm"] Jan 30 21:47:41 crc kubenswrapper[4721]: W0130 21:47:41.597223 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbe7c436_51b8_4985_933d_fde2a16767bd.slice/crio-0668ba079f03cbff3281a2dd8184f095c960010b27be3e41543f871d66316e37 WatchSource:0}: Error finding container 0668ba079f03cbff3281a2dd8184f095c960010b27be3e41543f871d66316e37: Status 404 returned error can't find the container with id 0668ba079f03cbff3281a2dd8184f095c960010b27be3e41543f871d66316e37 Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.835783 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.938387 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e7509b-d406-4a52-b5e9-6ba1589d2217","Type":"ContainerStarted","Data":"3f140070e36a0d36b8d86a392a0b554898926f2130e97551141ac97742c6a127"} Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.944463 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" event={"ID":"4698d6a6-e501-4f42-b6d0-172334487746","Type":"ContainerStarted","Data":"0c3442c574ff2c1de7f6a86c6487c4611945d27c20a0255d99d92b713d9bd2f3"} Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.964963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jbqqk" event={"ID":"46c7155a-444a-42b9-9e5d-183998bc5d22","Type":"ContainerStarted","Data":"608d96ad0b85b91fb46b12fb868f1a60f508ed4a4382de60affb327f786c3aa7"} Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.965019 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jbqqk" event={"ID":"46c7155a-444a-42b9-9e5d-183998bc5d22","Type":"ContainerStarted","Data":"a10380d925fe1bc95a22c785413253939cd596828938677e3316078eb7de89d0"} Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.984962 4721 generic.go:334] "Generic (PLEG): container finished" podID="82787ece-6910-4b49-a66f-fa2ea94c77b8" containerID="c4dac3f794eeeb60b4b22b206173a2fbb873a49692a1a645841c20d5eca6f201" exitCode=0 Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.985042 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" event={"ID":"82787ece-6910-4b49-a66f-fa2ea94c77b8","Type":"ContainerDied","Data":"c4dac3f794eeeb60b4b22b206173a2fbb873a49692a1a645841c20d5eca6f201"} Jan 30 21:47:41 crc kubenswrapper[4721]: I0130 21:47:41.993419 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.013951 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnhr" event={"ID":"41614e72-cf2f-43c9-a879-f4c76ff277d5","Type":"ContainerStarted","Data":"7adfe4bc614600a0acf524f822eacdf170c431970119cc76426bd1216b3e22bb"} Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.027358 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-jbqqk" podStartSLOduration=3.027338074 podStartE2EDuration="3.027338074s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:42.002754831 +0000 UTC m=+1850.794656077" watchObservedRunningTime="2026-01-30 21:47:42.027338074 +0000 UTC m=+1850.819239320" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.038664 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nfc55" event={"ID":"65808c08-fb8d-4039-a516-700ff573ca09","Type":"ContainerStarted","Data":"799357dc87eb5d73f30e3ed2c08f7d262f8aaee4eb1d2c0890f02c937bdf096b"} Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.059587 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-b98rm" event={"ID":"fbe7c436-51b8-4985-933d-fde2a16767bd","Type":"ContainerStarted","Data":"0668ba079f03cbff3281a2dd8184f095c960010b27be3e41543f871d66316e37"} Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.067721 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-nfc55" podStartSLOduration=3.067698246 podStartE2EDuration="3.067698246s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:42.06332952 +0000 UTC m=+1850.855230786" watchObservedRunningTime="2026-01-30 21:47:42.067698246 +0000 UTC m=+1850.859599492" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.088680 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-k4phl" event={"ID":"710ef32f-2c64-4aea-a0d4-ea18b41e4f10","Type":"ContainerStarted","Data":"ec124e0bd3fd71f6373da34adb0bad5dde0956133e764f252675e76a5ad99850"} Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.177748 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.264796 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.323602 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.705450 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.841480 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-swift-storage-0\") pod \"82787ece-6910-4b49-a66f-fa2ea94c77b8\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.841822 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-sb\") pod \"82787ece-6910-4b49-a66f-fa2ea94c77b8\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.841867 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-config\") pod \"82787ece-6910-4b49-a66f-fa2ea94c77b8\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.841947 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-svc\") pod \"82787ece-6910-4b49-a66f-fa2ea94c77b8\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.842008 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-nb\") pod \"82787ece-6910-4b49-a66f-fa2ea94c77b8\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.842087 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmfks\" (UniqueName: \"kubernetes.io/projected/82787ece-6910-4b49-a66f-fa2ea94c77b8-kube-api-access-pmfks\") pod \"82787ece-6910-4b49-a66f-fa2ea94c77b8\" (UID: \"82787ece-6910-4b49-a66f-fa2ea94c77b8\") " Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.849235 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82787ece-6910-4b49-a66f-fa2ea94c77b8-kube-api-access-pmfks" (OuterVolumeSpecName: "kube-api-access-pmfks") pod "82787ece-6910-4b49-a66f-fa2ea94c77b8" (UID: "82787ece-6910-4b49-a66f-fa2ea94c77b8"). InnerVolumeSpecName "kube-api-access-pmfks". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.880771 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "82787ece-6910-4b49-a66f-fa2ea94c77b8" (UID: "82787ece-6910-4b49-a66f-fa2ea94c77b8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.883725 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-config" (OuterVolumeSpecName: "config") pod "82787ece-6910-4b49-a66f-fa2ea94c77b8" (UID: "82787ece-6910-4b49-a66f-fa2ea94c77b8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.890592 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "82787ece-6910-4b49-a66f-fa2ea94c77b8" (UID: "82787ece-6910-4b49-a66f-fa2ea94c77b8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.924199 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "82787ece-6910-4b49-a66f-fa2ea94c77b8" (UID: "82787ece-6910-4b49-a66f-fa2ea94c77b8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.924642 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82787ece-6910-4b49-a66f-fa2ea94c77b8" (UID: "82787ece-6910-4b49-a66f-fa2ea94c77b8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.944814 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.944858 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.944871 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.944881 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.944890 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82787ece-6910-4b49-a66f-fa2ea94c77b8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:42 crc kubenswrapper[4721]: I0130 21:47:42.944899 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmfks\" (UniqueName: \"kubernetes.io/projected/82787ece-6910-4b49-a66f-fa2ea94c77b8-kube-api-access-pmfks\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.130155 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e70d0f13-a5f9-44f5-b79d-a88bcfb99923","Type":"ContainerStarted","Data":"92a32fd4512efdf6f7dc4e785c400df928d4c7d5ec6f061e20ad7e90b8ff2e93"} Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.134585 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" 
event={"ID":"82787ece-6910-4b49-a66f-fa2ea94c77b8","Type":"ContainerDied","Data":"e3f2e7d2a4142e887e7ad0dcb1a8bf0e5b6f9b9e5eeb2fb34b0337ed20c00378"} Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.134649 4721 scope.go:117] "RemoveContainer" containerID="c4dac3f794eeeb60b4b22b206173a2fbb873a49692a1a645841c20d5eca6f201" Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.135370 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-9tdw7" Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.138774 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8609af93-d668-4e8a-9870-5c06becf486f","Type":"ContainerStarted","Data":"e629da5868ee7c4339204f81b3ec9394c3fbbb5a686df1bd10c96262e997b61a"} Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.150526 4721 generic.go:334] "Generic (PLEG): container finished" podID="4698d6a6-e501-4f42-b6d0-172334487746" containerID="c385d6b39276c63483dcf4bc9dedd26e84d677c28450e2197db99399ab655234" exitCode=0 Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.150764 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" event={"ID":"4698d6a6-e501-4f42-b6d0-172334487746","Type":"ContainerDied","Data":"c385d6b39276c63483dcf4bc9dedd26e84d677c28450e2197db99399ab655234"} Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.294983 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-9tdw7"] Jan 30 21:47:43 crc kubenswrapper[4721]: I0130 21:47:43.295094 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-9tdw7"] Jan 30 21:47:44 crc kubenswrapper[4721]: I0130 21:47:44.107916 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82787ece-6910-4b49-a66f-fa2ea94c77b8" path="/var/lib/kubelet/pods/82787ece-6910-4b49-a66f-fa2ea94c77b8/volumes" Jan 30 21:47:45 crc kubenswrapper[4721]: I0130 21:47:45.180606 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8609af93-d668-4e8a-9870-5c06becf486f","Type":"ContainerStarted","Data":"afae43b438a631fdef96afcae7fe1c842599090c45579e30cd1330541c027b3b"} Jan 30 21:47:45 crc kubenswrapper[4721]: I0130 21:47:45.183564 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" event={"ID":"4698d6a6-e501-4f42-b6d0-172334487746","Type":"ContainerStarted","Data":"8cee18d15c81ed5577ae43697288f605301f7f1175647c1b407e1e42044441d6"} Jan 30 21:47:45 crc kubenswrapper[4721]: I0130 21:47:45.184622 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" Jan 30 21:47:45 crc kubenswrapper[4721]: I0130 21:47:45.186874 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e70d0f13-a5f9-44f5-b79d-a88bcfb99923","Type":"ContainerStarted","Data":"1953b870e28951079a1bf6f916dec1459d856d4cc75e7fc741b3a92f616b4d03"} Jan 30 21:47:45 crc kubenswrapper[4721]: I0130 21:47:45.211521 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" podStartSLOduration=6.211501105 podStartE2EDuration="6.211501105s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 
21:47:45.206017715 +0000 UTC m=+1853.997918981" watchObservedRunningTime="2026-01-30 21:47:45.211501105 +0000 UTC m=+1854.003402351" Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.206148 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8609af93-d668-4e8a-9870-5c06becf486f","Type":"ContainerStarted","Data":"e82a358abb5a18628caf732080732e283cf518e9c1fd528c33115cf4446a17bd"} Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.206327 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-log" containerID="cri-o://afae43b438a631fdef96afcae7fe1c842599090c45579e30cd1330541c027b3b" gracePeriod=30 Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.206369 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-httpd" containerID="cri-o://e82a358abb5a18628caf732080732e283cf518e9c1fd528c33115cf4446a17bd" gracePeriod=30 Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.224454 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-log" containerID="cri-o://1953b870e28951079a1bf6f916dec1459d856d4cc75e7fc741b3a92f616b4d03" gracePeriod=30 Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.224661 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-httpd" containerID="cri-o://978798a3bea8af3e753394bf260b4132a08cfaa2c100b30ebd37a1e5f6e1af3b" gracePeriod=30 Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.224946 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e70d0f13-a5f9-44f5-b79d-a88bcfb99923","Type":"ContainerStarted","Data":"978798a3bea8af3e753394bf260b4132a08cfaa2c100b30ebd37a1e5f6e1af3b"} Jan 30 21:47:47 crc kubenswrapper[4721]: I0130 21:47:47.248244 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.248223932 podStartE2EDuration="8.248223932s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:47.229394048 +0000 UTC m=+1856.021295314" watchObservedRunningTime="2026-01-30 21:47:47.248223932 +0000 UTC m=+1856.040125178" Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.241473 4721 generic.go:334] "Generic (PLEG): container finished" podID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerID="978798a3bea8af3e753394bf260b4132a08cfaa2c100b30ebd37a1e5f6e1af3b" exitCode=143 Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.242114 4721 generic.go:334] "Generic (PLEG): container finished" podID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerID="1953b870e28951079a1bf6f916dec1459d856d4cc75e7fc741b3a92f616b4d03" exitCode=143 Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.241621 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"e70d0f13-a5f9-44f5-b79d-a88bcfb99923","Type":"ContainerDied","Data":"978798a3bea8af3e753394bf260b4132a08cfaa2c100b30ebd37a1e5f6e1af3b"} Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.242201 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e70d0f13-a5f9-44f5-b79d-a88bcfb99923","Type":"ContainerDied","Data":"1953b870e28951079a1bf6f916dec1459d856d4cc75e7fc741b3a92f616b4d03"} Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.248066 4721 generic.go:334] "Generic (PLEG): container finished" podID="8609af93-d668-4e8a-9870-5c06becf486f" containerID="e82a358abb5a18628caf732080732e283cf518e9c1fd528c33115cf4446a17bd" exitCode=143 Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.248109 4721 generic.go:334] "Generic (PLEG): container finished" podID="8609af93-d668-4e8a-9870-5c06becf486f" containerID="afae43b438a631fdef96afcae7fe1c842599090c45579e30cd1330541c027b3b" exitCode=143 Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.248140 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8609af93-d668-4e8a-9870-5c06becf486f","Type":"ContainerDied","Data":"e82a358abb5a18628caf732080732e283cf518e9c1fd528c33115cf4446a17bd"} Jan 30 21:47:48 crc kubenswrapper[4721]: I0130 21:47:48.248193 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8609af93-d668-4e8a-9870-5c06becf486f","Type":"ContainerDied","Data":"afae43b438a631fdef96afcae7fe1c842599090c45579e30cd1330541c027b3b"} Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.272241 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8609af93-d668-4e8a-9870-5c06becf486f","Type":"ContainerDied","Data":"e629da5868ee7c4339204f81b3ec9394c3fbbb5a686df1bd10c96262e997b61a"} Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.272644 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e629da5868ee7c4339204f81b3ec9394c3fbbb5a686df1bd10c96262e997b61a" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.274460 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e70d0f13-a5f9-44f5-b79d-a88bcfb99923","Type":"ContainerDied","Data":"92a32fd4512efdf6f7dc4e785c400df928d4c7d5ec6f061e20ad7e90b8ff2e93"} Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.274502 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92a32fd4512efdf6f7dc4e785c400df928d4c7d5ec6f061e20ad7e90b8ff2e93" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.289746 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.295650 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.323921 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.323904619 podStartE2EDuration="11.323904619s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:47:47.27779555 +0000 UTC m=+1856.069696796" watchObservedRunningTime="2026-01-30 21:47:50.323904619 +0000 UTC m=+1859.115805865" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.358616 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.430661 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jpkbv"] Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.431174 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" containerID="cri-o://0e06051a923deceb6b1c43d8e0d056a958516e7c093b7447ef4d3de26f5673c4" gracePeriod=10 Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.444665 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-logs\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.444739 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-combined-ca-bundle\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.444774 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-scripts\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.444805 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2ftw\" (UniqueName: \"kubernetes.io/projected/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-kube-api-access-t2ftw\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.444856 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-config-data\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.444902 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-combined-ca-bundle\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445030 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445092 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-internal-tls-certs\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445119 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-logs\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445191 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-config-data\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445314 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445352 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-httpd-run\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445435 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-httpd-run\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445500 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-public-tls-certs\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445529 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-scripts\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.445558 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkltv\" (UniqueName: \"kubernetes.io/projected/8609af93-d668-4e8a-9870-5c06becf486f-kube-api-access-tkltv\") pod \"8609af93-d668-4e8a-9870-5c06becf486f\" (UID: \"8609af93-d668-4e8a-9870-5c06becf486f\") " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.450909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.452743 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-kube-api-access-t2ftw" (OuterVolumeSpecName: "kube-api-access-t2ftw") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "kube-api-access-t2ftw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.456976 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-scripts" (OuterVolumeSpecName: "scripts") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.457341 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-logs" (OuterVolumeSpecName: "logs") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.460590 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-logs" (OuterVolumeSpecName: "logs") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.467676 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.475571 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-scripts" (OuterVolumeSpecName: "scripts") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.501571 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8609af93-d668-4e8a-9870-5c06becf486f-kube-api-access-tkltv" (OuterVolumeSpecName: "kube-api-access-tkltv") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "kube-api-access-tkltv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.525226 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799" (OuterVolumeSpecName: "glance") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "pvc-041364f8-81b5-40ba-86c2-556e83a73799". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: E0130 21:47:50.528588 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db podName:e70d0f13-a5f9-44f5-b79d-a88bcfb99923 nodeName:}" failed. No retries permitted until 2026-01-30 21:47:51.028562137 +0000 UTC m=+1859.820463383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "glance" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923") : kubernetes.io/csi: Unmounter.TearDownAt failed: rpc error: code = Unknown desc = check target path: could not get consistent content of /proc/mounts after 3 attempts Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.534382 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554743 4721 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554783 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554796 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkltv\" (UniqueName: \"kubernetes.io/projected/8609af93-d668-4e8a-9870-5c06becf486f-kube-api-access-tkltv\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554809 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554825 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554836 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554846 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2ftw\" (UniqueName: 
\"kubernetes.io/projected/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-kube-api-access-t2ftw\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554856 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554885 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") on node \"crc\" " Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.554898 4721 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8609af93-d668-4e8a-9870-5c06becf486f-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.593857 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-config-data" (OuterVolumeSpecName: "config-data") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.596802 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8609af93-d668-4e8a-9870-5c06becf486f" (UID: "8609af93-d668-4e8a-9870-5c06becf486f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.619470 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.620707 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-config-data" (OuterVolumeSpecName: "config-data") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.625705 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.625865 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-041364f8-81b5-40ba-86c2-556e83a73799" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799") on node "crc"
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.664966 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.665023 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.665069 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.665095 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.665156 4721 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8609af93-d668-4e8a-9870-5c06becf486f-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:50 crc kubenswrapper[4721]: W0130 21:47:50.694731 4721 container.go:586] Failed to update stats for container "/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9331d226_1386_4cbb_9e87_ba2c41ac2a1b.slice/crio-0cd921d4744c3c5acfedb1914924ab3dd717d16ee2050e516881dd022bb683fc": error while statting cgroup v2: [openat2 /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9331d226_1386_4cbb_9e87_ba2c41ac2a1b.slice/crio-0cd921d4744c3c5acfedb1914924ab3dd717d16ee2050e516881dd022bb683fc/memory.max: no such device], continuing to push stats
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.700446 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:47:50 crc kubenswrapper[4721]: I0130 21:47:50.771280 4721 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e70d0f13-a5f9-44f5-b79d-a88bcfb99923-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.078289 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\" (UID: \"e70d0f13-a5f9-44f5-b79d-a88bcfb99923\") "
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.096914 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db" (OuterVolumeSpecName: "glance") pod "e70d0f13-a5f9-44f5-b79d-a88bcfb99923" (UID: "e70d0f13-a5f9-44f5-b79d-a88bcfb99923"). InnerVolumeSpecName "pvc-84c5d821-7abb-4e56-9189-5550dfb556db". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.181018 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") on node \"crc\" "
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.210393 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.210693 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-84c5d821-7abb-4e56-9189-5550dfb556db" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db") on node "crc"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.282703 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") on node \"crc\" DevicePath \"\""
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.292957 4721 generic.go:334] "Generic (PLEG): container finished" podID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerID="0e06051a923deceb6b1c43d8e0d056a958516e7c093b7447ef4d3de26f5673c4" exitCode=0
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.293072 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.297441 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" event={"ID":"9331d226-1386-4cbb-9e87-ba2c41ac2a1b","Type":"ContainerDied","Data":"0e06051a923deceb6b1c43d8e0d056a958516e7c093b7447ef4d3de26f5673c4"}
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.297490 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.354675 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.399092 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.418509 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434166 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: E0130 21:47:51.434654 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-httpd"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434672 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-httpd"
Jan 30 21:47:51 crc kubenswrapper[4721]: E0130 21:47:51.434697 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-log"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434703 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-log"
Jan 30 21:47:51 crc kubenswrapper[4721]: E0130 21:47:51.434714 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-httpd"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434719 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-httpd"
Jan 30 21:47:51 crc kubenswrapper[4721]: E0130 21:47:51.434729 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82787ece-6910-4b49-a66f-fa2ea94c77b8" containerName="init"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434735 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="82787ece-6910-4b49-a66f-fa2ea94c77b8" containerName="init"
Jan 30 21:47:51 crc kubenswrapper[4721]: E0130 21:47:51.434752 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-log"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434758 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-log"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434921 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="82787ece-6910-4b49-a66f-fa2ea94c77b8" containerName="init"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434932 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-httpd"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434941 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-httpd"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434953 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8609af93-d668-4e8a-9870-5c06becf486f" containerName="glance-log"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.434967 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" containerName="glance-log"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.436273 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.439590 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9k75v"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.439761 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.439871 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.440003 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.444520 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.458042 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.469679 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.471771 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.476484 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.477133 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.481482 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.591913 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.591961 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.591999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592073 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592100 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592146 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592179 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592205 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-scripts\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592225 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-logs\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592251 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592275 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf8sf\" (UniqueName: \"kubernetes.io/projected/1c10a349-defd-4a05-a317-a392fad3219f-kube-api-access-lf8sf\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0"
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592333 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmwrg\" (UniqueName: \"kubernetes.io/projected/827a4c80-2e4a-4f13-a78c-1583f776cd6c-kube-api-access-tmwrg\") pod \"glance-default-internal-api-0\" (UID:
\"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592364 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592395 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592413 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-config-data\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.592437 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-logs\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694108 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694533 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694561 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-config-data\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694601 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-logs\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " 
pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694678 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694718 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694759 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694796 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694855 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694905 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694961 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-scripts\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.694988 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-logs\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.695032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " 
pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.695067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf8sf\" (UniqueName: \"kubernetes.io/projected/1c10a349-defd-4a05-a317-a392fad3219f-kube-api-access-lf8sf\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.695123 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmwrg\" (UniqueName: \"kubernetes.io/projected/827a4c80-2e4a-4f13-a78c-1583f776cd6c-kube-api-access-tmwrg\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.696030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-logs\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.696463 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.697065 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-logs\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.698840 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.702474 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.704107 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.708281 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.708396 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.709208 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-config-data\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.709945 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.720216 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-scripts\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.723459 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.740365 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmwrg\" (UniqueName: \"kubernetes.io/projected/827a4c80-2e4a-4f13-a78c-1583f776cd6c-kube-api-access-tmwrg\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.744937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf8sf\" (UniqueName: \"kubernetes.io/projected/1c10a349-defd-4a05-a317-a392fad3219f-kube-api-access-lf8sf\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.758406 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.759051 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7f3e3924f4a6ec4e0a2834994592735d1017c71a0fc9cab6b021afe197356bdd/globalmount\"" pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.758838 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.759358 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a2a66a61f2645de3c0e9d3f208d5113b5666c55647cddcb1d91cf50b3d6010ba/globalmount\"" pod="openstack/glance-default-internal-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.970621 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " pod="openstack/glance-default-external-api-0" Jan 30 21:47:51 crc kubenswrapper[4721]: I0130 21:47:51.993881 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:47:52 crc kubenswrapper[4721]: I0130 21:47:52.105863 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8609af93-d668-4e8a-9870-5c06becf486f" path="/var/lib/kubelet/pods/8609af93-d668-4e8a-9870-5c06becf486f/volumes" Jan 30 21:47:52 crc kubenswrapper[4721]: I0130 21:47:52.109014 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e70d0f13-a5f9-44f5-b79d-a88bcfb99923" path="/var/lib/kubelet/pods/e70d0f13-a5f9-44f5-b79d-a88bcfb99923/volumes" Jan 30 21:47:52 crc kubenswrapper[4721]: I0130 21:47:52.159274 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9k75v" Jan 30 21:47:52 crc kubenswrapper[4721]: I0130 21:47:52.171508 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:47:52 crc kubenswrapper[4721]: I0130 21:47:52.176547 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:47:54 crc kubenswrapper[4721]: I0130 21:47:54.448676 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:47:59 crc kubenswrapper[4721]: I0130 21:47:59.448527 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:48:04 crc kubenswrapper[4721]: I0130 21:48:04.449137 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:48:04 crc kubenswrapper[4721]: I0130 21:48:04.451089 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:48:08 crc kubenswrapper[4721]: E0130 21:48:08.618585 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Jan 30 21:48:08 crc kubenswrapper[4721]: E0130 21:48:08.619497 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nktdd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-gr825_openstack(bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:48:08 crc kubenswrapper[4721]: E0130 21:48:08.620671 4721 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-gr825" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" Jan 30 21:48:09 crc kubenswrapper[4721]: I0130 21:48:09.449099 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:48:09 crc kubenswrapper[4721]: E0130 21:48:09.629688 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-gr825" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" Jan 30 21:48:14 crc kubenswrapper[4721]: I0130 21:48:14.448187 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:48:19 crc kubenswrapper[4721]: I0130 21:48:19.449747 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:48:24 crc kubenswrapper[4721]: I0130 21:48:24.448477 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: connect: connection refused" Jan 30 21:48:26 crc kubenswrapper[4721]: I0130 21:48:26.814574 4721 generic.go:334] "Generic (PLEG): container finished" podID="65808c08-fb8d-4039-a516-700ff573ca09" containerID="799357dc87eb5d73f30e3ed2c08f7d262f8aaee4eb1d2c0890f02c937bdf096b" exitCode=0 Jan 30 21:48:26 crc kubenswrapper[4721]: I0130 21:48:26.814687 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nfc55" event={"ID":"65808c08-fb8d-4039-a516-700ff573ca09","Type":"ContainerDied","Data":"799357dc87eb5d73f30e3ed2c08f7d262f8aaee4eb1d2c0890f02c937bdf096b"} Jan 30 21:48:28 crc kubenswrapper[4721]: E0130 21:48:28.859961 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Jan 30 21:48:28 crc kubenswrapper[4721]: E0130 21:48:28.860673 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v94d8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-k4phl_openstack(710ef32f-2c64-4aea-a0d4-ea18b41e4f10): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:48:28 crc kubenswrapper[4721]: E0130 21:48:28.861848 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-k4phl" podUID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" Jan 30 21:48:29 crc kubenswrapper[4721]: E0130 21:48:29.843405 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-k4phl" podUID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" Jan 30 21:48:34 crc kubenswrapper[4721]: I0130 21:48:34.448990 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: i/o timeout" Jan 30 21:48:39 crc kubenswrapper[4721]: I0130 21:48:39.450244 4721 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: i/o timeout" Jan 30 21:48:44 crc kubenswrapper[4721]: I0130 21:48:44.451678 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: i/o timeout" Jan 30 21:48:49 crc kubenswrapper[4721]: I0130 21:48:49.453556 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: i/o timeout" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.586994 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.594664 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691324 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-config-data\") pod \"65808c08-fb8d-4039-a516-700ff573ca09\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691382 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-credential-keys\") pod \"65808c08-fb8d-4039-a516-700ff573ca09\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691418 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-config\") pod \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691451 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-scripts\") pod \"65808c08-fb8d-4039-a516-700ff573ca09\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691484 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-sb\") pod \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691508 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-nb\") pod \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691531 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-fernet-keys\") pod 
\"65808c08-fb8d-4039-a516-700ff573ca09\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691562 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-combined-ca-bundle\") pod \"65808c08-fb8d-4039-a516-700ff573ca09\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.691597 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-svc\") pod \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.716822 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-scripts" (OuterVolumeSpecName: "scripts") pod "65808c08-fb8d-4039-a516-700ff573ca09" (UID: "65808c08-fb8d-4039-a516-700ff573ca09"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.717180 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "65808c08-fb8d-4039-a516-700ff573ca09" (UID: "65808c08-fb8d-4039-a516-700ff573ca09"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.750384 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "65808c08-fb8d-4039-a516-700ff573ca09" (UID: "65808c08-fb8d-4039-a516-700ff573ca09"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.794123 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w29st\" (UniqueName: \"kubernetes.io/projected/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-kube-api-access-w29st\") pod \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.794440 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7lhw\" (UniqueName: \"kubernetes.io/projected/65808c08-fb8d-4039-a516-700ff573ca09-kube-api-access-x7lhw\") pod \"65808c08-fb8d-4039-a516-700ff573ca09\" (UID: \"65808c08-fb8d-4039-a516-700ff573ca09\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.794542 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-swift-storage-0\") pod \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\" (UID: \"9331d226-1386-4cbb-9e87-ba2c41ac2a1b\") " Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.795005 4721 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.795077 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.795153 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.806561 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-config-data" (OuterVolumeSpecName: "config-data") pod "65808c08-fb8d-4039-a516-700ff573ca09" (UID: "65808c08-fb8d-4039-a516-700ff573ca09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.811405 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65808c08-fb8d-4039-a516-700ff573ca09-kube-api-access-x7lhw" (OuterVolumeSpecName: "kube-api-access-x7lhw") pod "65808c08-fb8d-4039-a516-700ff573ca09" (UID: "65808c08-fb8d-4039-a516-700ff573ca09"). InnerVolumeSpecName "kube-api-access-x7lhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.835512 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-kube-api-access-w29st" (OuterVolumeSpecName: "kube-api-access-w29st") pod "9331d226-1386-4cbb-9e87-ba2c41ac2a1b" (UID: "9331d226-1386-4cbb-9e87-ba2c41ac2a1b"). InnerVolumeSpecName "kube-api-access-w29st". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.856909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65808c08-fb8d-4039-a516-700ff573ca09" (UID: "65808c08-fb8d-4039-a516-700ff573ca09"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.859808 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9331d226-1386-4cbb-9e87-ba2c41ac2a1b" (UID: "9331d226-1386-4cbb-9e87-ba2c41ac2a1b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.863100 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9331d226-1386-4cbb-9e87-ba2c41ac2a1b" (UID: "9331d226-1386-4cbb-9e87-ba2c41ac2a1b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.865320 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-config" (OuterVolumeSpecName: "config") pod "9331d226-1386-4cbb-9e87-ba2c41ac2a1b" (UID: "9331d226-1386-4cbb-9e87-ba2c41ac2a1b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.879499 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9331d226-1386-4cbb-9e87-ba2c41ac2a1b" (UID: "9331d226-1386-4cbb-9e87-ba2c41ac2a1b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.890478 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9331d226-1386-4cbb-9e87-ba2c41ac2a1b" (UID: "9331d226-1386-4cbb-9e87-ba2c41ac2a1b"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.896958 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897188 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897208 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897232 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897245 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65808c08-fb8d-4039-a516-700ff573ca09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897256 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897268 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w29st\" (UniqueName: \"kubernetes.io/projected/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-kube-api-access-w29st\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897280 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7lhw\" (UniqueName: \"kubernetes.io/projected/65808c08-fb8d-4039-a516-700ff573ca09-kube-api-access-x7lhw\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:53 crc kubenswrapper[4721]: I0130 21:48:53.897292 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9331d226-1386-4cbb-9e87-ba2c41ac2a1b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.132792 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nfc55" event={"ID":"65808c08-fb8d-4039-a516-700ff573ca09","Type":"ContainerDied","Data":"af64e2c7400f705e99829cbe26a1c73abf83982b49b4c7373f04cd3defc09e73"} Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.132844 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af64e2c7400f705e99829cbe26a1c73abf83982b49b4c7373f04cd3defc09e73" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.132805 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-nfc55" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.136811 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" event={"ID":"9331d226-1386-4cbb-9e87-ba2c41ac2a1b","Type":"ContainerDied","Data":"0cd921d4744c3c5acfedb1914924ab3dd717d16ee2050e516881dd022bb683fc"} Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.136864 4721 scope.go:117] "RemoveContainer" containerID="0e06051a923deceb6b1c43d8e0d056a958516e7c093b7447ef4d3de26f5673c4" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.136897 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.163620 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jpkbv"] Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.171742 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jpkbv"] Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.454974 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jpkbv" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.160:5353: i/o timeout" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.736061 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-nfc55"] Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.745325 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-nfc55"] Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.827062 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5kbhd"] Jan 30 21:48:54 crc kubenswrapper[4721]: E0130 21:48:54.827884 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="init" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.827911 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="init" Jan 30 21:48:54 crc kubenswrapper[4721]: E0130 21:48:54.827952 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.827960 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" Jan 30 21:48:54 crc kubenswrapper[4721]: E0130 21:48:54.827970 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65808c08-fb8d-4039-a516-700ff573ca09" containerName="keystone-bootstrap" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.827976 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="65808c08-fb8d-4039-a516-700ff573ca09" containerName="keystone-bootstrap" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.828220 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" containerName="dnsmasq-dns" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.828233 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="65808c08-fb8d-4039-a516-700ff573ca09" containerName="keystone-bootstrap" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.829081 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.835122 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.835235 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qfzgk" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.835282 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.835463 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.855239 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5kbhd"] Jan 30 21:48:54 crc kubenswrapper[4721]: E0130 21:48:54.886832 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Jan 30 21:48:54 crc kubenswrapper[4721]: E0130 21:48:54.887506 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n648h585h8bh558h596h7dh5d6h77h56dhdh558hd9h6fh695h598h5c4h559h5h54fh7fh64bh645hfbh674h5bch5cbh659h5b4h5f8hb8hcfh5cdq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v9b82,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 
/var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(f6e7509b-d406-4a52-b5e9-6ba1589d2217): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.919602 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-credential-keys\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.919678 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-fernet-keys\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.919738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-config-data\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.919771 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-scripts\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.919792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cbzd\" (UniqueName: \"kubernetes.io/projected/aa298585-a353-4910-9cb2-6527745b5811-kube-api-access-7cbzd\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:54 crc kubenswrapper[4721]: I0130 21:48:54.919814 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-combined-ca-bundle\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.021445 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-credential-keys\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.021579 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-fernet-keys\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.021674 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-config-data\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.021737 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-scripts\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.021770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cbzd\" (UniqueName: \"kubernetes.io/projected/aa298585-a353-4910-9cb2-6527745b5811-kube-api-access-7cbzd\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.021806 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-combined-ca-bundle\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.027060 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-combined-ca-bundle\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.027740 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-config-data\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.028814 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-credential-keys\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.042161 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-fernet-keys\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " 
pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.042507 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-scripts\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.045499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cbzd\" (UniqueName: \"kubernetes.io/projected/aa298585-a353-4910-9cb2-6527745b5811-kube-api-access-7cbzd\") pod \"keystone-bootstrap-5kbhd\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: I0130 21:48:55.158421 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:48:55 crc kubenswrapper[4721]: E0130 21:48:55.252569 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Jan 30 21:48:55 crc kubenswrapper[4721]: E0130 21:48:55.252732 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nktdd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-gr825_openstack(bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:48:55 crc kubenswrapper[4721]: E0130 21:48:55.254010 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-gr825" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" Jan 30 
21:48:56 crc kubenswrapper[4721]: I0130 21:48:56.103289 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65808c08-fb8d-4039-a516-700ff573ca09" path="/var/lib/kubelet/pods/65808c08-fb8d-4039-a516-700ff573ca09/volumes" Jan 30 21:48:56 crc kubenswrapper[4721]: I0130 21:48:56.104169 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9331d226-1386-4cbb-9e87-ba2c41ac2a1b" path="/var/lib/kubelet/pods/9331d226-1386-4cbb-9e87-ba2c41ac2a1b/volumes" Jan 30 21:48:57 crc kubenswrapper[4721]: I0130 21:48:57.580835 4721 scope.go:117] "RemoveContainer" containerID="74f549dc3ee116341bb80f53f6961d1487184a589ad71424239bc7352644d9a2" Jan 30 21:48:58 crc kubenswrapper[4721]: I0130 21:48:58.151452 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:48:58 crc kubenswrapper[4721]: I0130 21:48:58.273431 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:49:03 crc kubenswrapper[4721]: I0130 21:49:03.225941 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"827a4c80-2e4a-4f13-a78c-1583f776cd6c","Type":"ContainerStarted","Data":"de13d489e8b641710d8e8adb557b41227fbc29555ccae4a351740363c002cc53"} Jan 30 21:49:03 crc kubenswrapper[4721]: I0130 21:49:03.231021 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1c10a349-defd-4a05-a317-a392fad3219f","Type":"ContainerStarted","Data":"14e1a44adb290c69154c4d5d06e0b9f3a1dbf27a1da6bd4ecc1e4de0e904fdab"} Jan 30 21:49:06 crc kubenswrapper[4721]: E0130 21:49:06.095503 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-gr825" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" Jan 30 21:49:07 crc kubenswrapper[4721]: I0130 21:49:07.939264 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5kbhd"] Jan 30 21:49:07 crc kubenswrapper[4721]: E0130 21:49:07.952335 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Jan 30 21:49:07 crc kubenswrapper[4721]: E0130 21:49:07.952396 4721 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Jan 30 21:49:07 crc kubenswrapper[4721]: E0130 21:49:07.952539 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gxqlc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-b98rm_openstack(fbe7c436-51b8-4985-933d-fde2a16767bd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 21:49:07 crc kubenswrapper[4721]: E0130 21:49:07.954865 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-b98rm" podUID="fbe7c436-51b8-4985-933d-fde2a16767bd" Jan 30 21:49:08 crc kubenswrapper[4721]: E0130 21:49:08.288325 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-b98rm" podUID="fbe7c436-51b8-4985-933d-fde2a16767bd" Jan 30 21:49:08 crc kubenswrapper[4721]: W0130 21:49:08.615483 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa298585_a353_4910_9cb2_6527745b5811.slice/crio-d979daaf70b81cddd2ad51601829d582b75fb9fd421708dfbe9423cc6487086d WatchSource:0}: Error finding container 
d979daaf70b81cddd2ad51601829d582b75fb9fd421708dfbe9423cc6487086d: Status 404 returned error can't find the container with id d979daaf70b81cddd2ad51601829d582b75fb9fd421708dfbe9423cc6487086d Jan 30 21:49:09 crc kubenswrapper[4721]: I0130 21:49:09.295701 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnhr" event={"ID":"41614e72-cf2f-43c9-a879-f4c76ff277d5","Type":"ContainerStarted","Data":"77b05f5f7abd17e94a5cbc6eb3c197cabe6aff5802e6809e1473bb0d66d5287e"} Jan 30 21:49:09 crc kubenswrapper[4721]: I0130 21:49:09.297766 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5kbhd" event={"ID":"aa298585-a353-4910-9cb2-6527745b5811","Type":"ContainerStarted","Data":"e4ef169e954c73d5d9db72483bd974c94e99b9061ee448c7ede797d1a4c31731"} Jan 30 21:49:09 crc kubenswrapper[4721]: I0130 21:49:09.297793 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5kbhd" event={"ID":"aa298585-a353-4910-9cb2-6527745b5811","Type":"ContainerStarted","Data":"d979daaf70b81cddd2ad51601829d582b75fb9fd421708dfbe9423cc6487086d"} Jan 30 21:49:09 crc kubenswrapper[4721]: I0130 21:49:09.319550 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jjnhr" podStartSLOduration=16.60222823 podStartE2EDuration="1m30.319530878s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="2026-01-30 21:47:41.494182876 +0000 UTC m=+1850.286084122" lastFinishedPulling="2026-01-30 21:48:55.211485524 +0000 UTC m=+1924.003386770" observedRunningTime="2026-01-30 21:49:09.310117906 +0000 UTC m=+1938.102019152" watchObservedRunningTime="2026-01-30 21:49:09.319530878 +0000 UTC m=+1938.111432124" Jan 30 21:49:10 crc kubenswrapper[4721]: I0130 21:49:10.309220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-k4phl" event={"ID":"710ef32f-2c64-4aea-a0d4-ea18b41e4f10","Type":"ContainerStarted","Data":"ca7aed5bf67815d1ae51471ebe7450ee976b5e2dfd0f0c7ef3e3b8ea9159b7e8"} Jan 30 21:49:10 crc kubenswrapper[4721]: I0130 21:49:10.311484 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"827a4c80-2e4a-4f13-a78c-1583f776cd6c","Type":"ContainerStarted","Data":"bd8207409fec4dde97a473bbdfb80877b921a2bf6bd9eb33ea48020e7f72abdb"} Jan 30 21:49:10 crc kubenswrapper[4721]: I0130 21:49:10.312891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1c10a349-defd-4a05-a317-a392fad3219f","Type":"ContainerStarted","Data":"bd78361dbf0c20ca745e3caa7cfbfe205c0893edd4e259a868e5416dc94e1d55"} Jan 30 21:49:10 crc kubenswrapper[4721]: I0130 21:49:10.348900 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-k4phl" podStartSLOduration=3.612753454 podStartE2EDuration="1m31.348875708s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="2026-01-30 21:47:40.895572857 +0000 UTC m=+1849.687474103" lastFinishedPulling="2026-01-30 21:49:08.631695091 +0000 UTC m=+1937.423596357" observedRunningTime="2026-01-30 21:49:10.332406437 +0000 UTC m=+1939.124307733" watchObservedRunningTime="2026-01-30 21:49:10.348875708 +0000 UTC m=+1939.140776974" Jan 30 21:49:10 crc kubenswrapper[4721]: I0130 21:49:10.356904 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5kbhd" podStartSLOduration=16.356887006 podStartE2EDuration="16.356887006s" 
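
The pod_startup_latency_tracker records in this stretch carry two durations. Taking placement-db-sync-jjnhr as a worked example, podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling); the arithmetic reproduces the logged 1m30.319530878s and 16.60222823s exactly. A short Go check of that reading (an interpretation of the logged values, not kubelet source):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Timestamps copied from the placement-db-sync-jjnhr record above.
        parse := func(s string) time.Time {
            t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2026-01-30 21:47:39 +0000 UTC")
        firstPull := parse("2026-01-30 21:47:41.494182876 +0000 UTC")
        lastPull := parse("2026-01-30 21:48:55.211485524 +0000 UTC")
        observed := parse("2026-01-30 21:49:09.319530878 +0000 UTC")

        e2e := observed.Sub(created)         // 1m30.319530878s end to end
        slo := e2e - lastPull.Sub(firstPull) // minus the ~73.7s pull window
        fmt.Println("podStartE2EDuration:", e2e)
        fmt.Println("podStartSLOduration:", slo) // 16.60222823s
    }

Consistent with this reading, the keystone-bootstrap-5kbhd record that follows shows zero-valued firstStartedPulling/lastFinishedPulling (no pull was needed), and its SLO and E2E durations are identical at 16.356887006s.
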
podCreationTimestamp="2026-01-30 21:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:49:10.352315815 +0000 UTC m=+1939.144217061" watchObservedRunningTime="2026-01-30 21:49:10.356887006 +0000 UTC m=+1939.148788252" Jan 30 21:49:12 crc kubenswrapper[4721]: I0130 21:49:12.348777 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"827a4c80-2e4a-4f13-a78c-1583f776cd6c","Type":"ContainerStarted","Data":"b07c6536bdc26a5e54b1a16711ad9ac5b7ad43a8d189e4b09714c1def64f2899"} Jan 30 21:49:12 crc kubenswrapper[4721]: I0130 21:49:12.352237 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1c10a349-defd-4a05-a317-a392fad3219f","Type":"ContainerStarted","Data":"333e38ede45fb6e36c3f7950a1d5ce2ea248cff09092e32045ff9bdbd34cb9e6"} Jan 30 21:49:13 crc kubenswrapper[4721]: I0130 21:49:13.421586 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=82.42154953 podStartE2EDuration="1m22.42154953s" podCreationTimestamp="2026-01-30 21:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:49:13.402108457 +0000 UTC m=+1942.194009793" watchObservedRunningTime="2026-01-30 21:49:13.42154953 +0000 UTC m=+1942.213450816" Jan 30 21:49:13 crc kubenswrapper[4721]: I0130 21:49:13.432189 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=82.43216844 podStartE2EDuration="1m22.43216844s" podCreationTimestamp="2026-01-30 21:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:49:13.429458386 +0000 UTC m=+1942.221359702" watchObservedRunningTime="2026-01-30 21:49:13.43216844 +0000 UTC m=+1942.224069696" Jan 30 21:49:19 crc kubenswrapper[4721]: I0130 21:49:19.440938 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e7509b-d406-4a52-b5e9-6ba1589d2217","Type":"ContainerStarted","Data":"0743ad15c2a957b56e9a7cdef78066074f4cf54d60fc58b8a47459a84cb76faf"} Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.174952 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.175623 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.175672 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.175686 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.176792 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.177165 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.177369 4721 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.177398 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.213045 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.215097 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.226473 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:22 crc kubenswrapper[4721]: I0130 21:49:22.231562 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 21:49:28 crc kubenswrapper[4721]: I0130 21:49:28.958607 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:28 crc kubenswrapper[4721]: I0130 21:49:28.959428 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:49:28 crc kubenswrapper[4721]: I0130 21:49:28.962171 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 21:49:28 crc kubenswrapper[4721]: I0130 21:49:28.966847 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 21:49:28 crc kubenswrapper[4721]: I0130 21:49:28.967013 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:49:28 crc kubenswrapper[4721]: I0130 21:49:28.968908 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 21:49:29 crc kubenswrapper[4721]: I0130 21:49:29.449637 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:49:29 crc kubenswrapper[4721]: I0130 21:49:29.450052 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:49:30 crc kubenswrapper[4721]: I0130 21:49:30.577756 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-b98rm" event={"ID":"fbe7c436-51b8-4985-933d-fde2a16767bd","Type":"ContainerStarted","Data":"569ac664f271273d28adff4b507b3b3f20c4031671b43bf4a29877af6b05c6b5"} Jan 30 21:49:30 crc kubenswrapper[4721]: I0130 21:49:30.583889 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gr825" event={"ID":"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76","Type":"ContainerStarted","Data":"b412d308c94e402ce85900f21e5966c4b248167bddfe3a2f59825b7513dd9036"} Jan 30 21:49:33 crc kubenswrapper[4721]: I0130 21:49:33.660781 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
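
The SyncLoop (probe) records above walk the glance containers through the kubelet's probe state machine: the startup probe flips from "unhealthy" to "started", which unblocks readiness probing (the status="" entries apparently reflect a cleared, not-yet-established result), and "ready" finally marks the pod Ready. The accompanying prober_manager message "Failed to trigger a manual run" appears to be the prober dropping a requested immediate re-probe because one is already queued, consistent with a non-blocking send on a buffered channel, as sketched below (illustrative only, not the kubelet's code):

    package main

    import "fmt"

    func main() {
        // A probe worker drains a one-slot trigger channel; a manual trigger
        // is dropped (and logged) when the slot is already occupied.
        manualTrigger := make(chan struct{}, 1)

        trigger := func() {
            select {
            case manualTrigger <- struct{}{}:
                fmt.Println("manual probe run queued")
            default:
                fmt.Println("Failed to trigger a manual run") // slot full
            }
        }

        trigger()       // queued
        trigger()       // dropped: the worker has not drained the first one
        <-manualTrigger // worker consumes the pending trigger
        trigger()       // queued again
    }
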
pod="openstack/cloudkitty-db-sync-b98rm" podStartSLOduration=7.807269975 podStartE2EDuration="1m54.660760349s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="2026-01-30 21:47:41.600662889 +0000 UTC m=+1850.392564135" lastFinishedPulling="2026-01-30 21:49:28.454153243 +0000 UTC m=+1957.246054509" observedRunningTime="2026-01-30 21:49:33.641067188 +0000 UTC m=+1962.432968434" watchObservedRunningTime="2026-01-30 21:49:33.660760349 +0000 UTC m=+1962.452661595" Jan 30 21:49:33 crc kubenswrapper[4721]: I0130 21:49:33.665087 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-gr825" podStartSLOduration=6.998324362 podStartE2EDuration="1m54.665072962s" podCreationTimestamp="2026-01-30 21:47:39 +0000 UTC" firstStartedPulling="2026-01-30 21:47:40.785369749 +0000 UTC m=+1849.577270995" lastFinishedPulling="2026-01-30 21:49:28.452118339 +0000 UTC m=+1957.244019595" observedRunningTime="2026-01-30 21:49:33.658436767 +0000 UTC m=+1962.450338043" watchObservedRunningTime="2026-01-30 21:49:33.665072962 +0000 UTC m=+1962.456974208" Jan 30 21:49:35 crc kubenswrapper[4721]: I0130 21:49:35.644231 4721 generic.go:334] "Generic (PLEG): container finished" podID="aa298585-a353-4910-9cb2-6527745b5811" containerID="e4ef169e954c73d5d9db72483bd974c94e99b9061ee448c7ede797d1a4c31731" exitCode=0 Jan 30 21:49:35 crc kubenswrapper[4721]: I0130 21:49:35.644282 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5kbhd" event={"ID":"aa298585-a353-4910-9cb2-6527745b5811","Type":"ContainerDied","Data":"e4ef169e954c73d5d9db72483bd974c94e99b9061ee448c7ede797d1a4c31731"} Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.303058 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.499219 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-fernet-keys\") pod \"aa298585-a353-4910-9cb2-6527745b5811\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.499326 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-credential-keys\") pod \"aa298585-a353-4910-9cb2-6527745b5811\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.499448 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cbzd\" (UniqueName: \"kubernetes.io/projected/aa298585-a353-4910-9cb2-6527745b5811-kube-api-access-7cbzd\") pod \"aa298585-a353-4910-9cb2-6527745b5811\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.499493 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-combined-ca-bundle\") pod \"aa298585-a353-4910-9cb2-6527745b5811\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.499618 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-config-data\") pod \"aa298585-a353-4910-9cb2-6527745b5811\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.499650 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-scripts\") pod \"aa298585-a353-4910-9cb2-6527745b5811\" (UID: \"aa298585-a353-4910-9cb2-6527745b5811\") " Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.505251 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-scripts" (OuterVolumeSpecName: "scripts") pod "aa298585-a353-4910-9cb2-6527745b5811" (UID: "aa298585-a353-4910-9cb2-6527745b5811"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.506075 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "aa298585-a353-4910-9cb2-6527745b5811" (UID: "aa298585-a353-4910-9cb2-6527745b5811"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.506100 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aa298585-a353-4910-9cb2-6527745b5811" (UID: "aa298585-a353-4910-9cb2-6527745b5811"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.506930 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa298585-a353-4910-9cb2-6527745b5811-kube-api-access-7cbzd" (OuterVolumeSpecName: "kube-api-access-7cbzd") pod "aa298585-a353-4910-9cb2-6527745b5811" (UID: "aa298585-a353-4910-9cb2-6527745b5811"). InnerVolumeSpecName "kube-api-access-7cbzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.534008 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa298585-a353-4910-9cb2-6527745b5811" (UID: "aa298585-a353-4910-9cb2-6527745b5811"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.548599 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-config-data" (OuterVolumeSpecName: "config-data") pod "aa298585-a353-4910-9cb2-6527745b5811" (UID: "aa298585-a353-4910-9cb2-6527745b5811"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.602426 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cbzd\" (UniqueName: \"kubernetes.io/projected/aa298585-a353-4910-9cb2-6527745b5811-kube-api-access-7cbzd\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.602656 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.602772 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.602880 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.602975 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.603062 4721 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa298585-a353-4910-9cb2-6527745b5811-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.669986 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5kbhd" event={"ID":"aa298585-a353-4910-9cb2-6527745b5811","Type":"ContainerDied","Data":"d979daaf70b81cddd2ad51601829d582b75fb9fd421708dfbe9423cc6487086d"} Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.670036 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d979daaf70b81cddd2ad51601829d582b75fb9fd421708dfbe9423cc6487086d" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 
21:49:37.670074 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5kbhd" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.673391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e7509b-d406-4a52-b5e9-6ba1589d2217","Type":"ContainerStarted","Data":"495884936e66f0212054f41ae9c29eec370bd2355362ed832a6beb09366045eb"} Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.778956 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-54b7957dff-42xvx"] Jan 30 21:49:37 crc kubenswrapper[4721]: E0130 21:49:37.779786 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa298585-a353-4910-9cb2-6527745b5811" containerName="keystone-bootstrap" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.779982 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa298585-a353-4910-9cb2-6527745b5811" containerName="keystone-bootstrap" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.780231 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa298585-a353-4910-9cb2-6527745b5811" containerName="keystone-bootstrap" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.780962 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.783651 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.787762 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.787836 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.787860 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-qfzgk" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.788035 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.788049 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.797092 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54b7957dff-42xvx"] Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807393 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-config-data\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807445 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdlqj\" (UniqueName: \"kubernetes.io/projected/e94a796b-4938-444f-811d-dbba68141f41-kube-api-access-vdlqj\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807560 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-scripts\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807648 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-fernet-keys\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807682 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-credential-keys\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807713 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-public-tls-certs\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807779 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-internal-tls-certs\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.807809 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-combined-ca-bundle\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.908950 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-config-data\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909004 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdlqj\" (UniqueName: \"kubernetes.io/projected/e94a796b-4938-444f-811d-dbba68141f41-kube-api-access-vdlqj\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909104 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-scripts\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909155 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-fernet-keys\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909186 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-credential-keys\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-public-tls-certs\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909253 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-internal-tls-certs\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.909272 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-combined-ca-bundle\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.913610 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-combined-ca-bundle\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.914075 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-credential-keys\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.914964 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-fernet-keys\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.916207 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-config-data\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.916737 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-internal-tls-certs\") pod \"keystone-54b7957dff-42xvx\" (UID: 
\"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.917929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-public-tls-certs\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.923625 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a796b-4938-444f-811d-dbba68141f41-scripts\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:37 crc kubenswrapper[4721]: I0130 21:49:37.933097 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdlqj\" (UniqueName: \"kubernetes.io/projected/e94a796b-4938-444f-811d-dbba68141f41-kube-api-access-vdlqj\") pod \"keystone-54b7957dff-42xvx\" (UID: \"e94a796b-4938-444f-811d-dbba68141f41\") " pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:38 crc kubenswrapper[4721]: I0130 21:49:38.142740 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:38 crc kubenswrapper[4721]: I0130 21:49:38.643658 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54b7957dff-42xvx"] Jan 30 21:49:38 crc kubenswrapper[4721]: I0130 21:49:38.687856 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54b7957dff-42xvx" event={"ID":"e94a796b-4938-444f-811d-dbba68141f41","Type":"ContainerStarted","Data":"f264d163ced3d8e6f00bd521a7590d99b700ef0f27a3f0686be7cfdf5e91866a"} Jan 30 21:49:39 crc kubenswrapper[4721]: I0130 21:49:39.699537 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54b7957dff-42xvx" event={"ID":"e94a796b-4938-444f-811d-dbba68141f41","Type":"ContainerStarted","Data":"dda9c83832a6a51888715f5134b5fe1a1e04402e6f5b7a656c8a590b39d7b7df"} Jan 30 21:49:39 crc kubenswrapper[4721]: I0130 21:49:39.699991 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:49:39 crc kubenswrapper[4721]: I0130 21:49:39.737576 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-54b7957dff-42xvx" podStartSLOduration=2.7375524479999997 podStartE2EDuration="2.737552448s" podCreationTimestamp="2026-01-30 21:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:49:39.718641552 +0000 UTC m=+1968.510542818" watchObservedRunningTime="2026-01-30 21:49:39.737552448 +0000 UTC m=+1968.529453694" Jan 30 21:49:52 crc kubenswrapper[4721]: E0130 21:49:52.408577 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Jan 30 21:49:52 crc kubenswrapper[4721]: E0130 21:49:52.409413 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v9b82,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(f6e7509b-d406-4a52-b5e9-6ba1589d2217): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 21:49:52 crc kubenswrapper[4721]: E0130 21:49:52.410747 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" Jan 30 21:49:52 crc kubenswrapper[4721]: I0130 21:49:52.854395 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="ceilometer-notification-agent" 
containerID="cri-o://0743ad15c2a957b56e9a7cdef78066074f4cf54d60fc58b8a47459a84cb76faf" gracePeriod=30 Jan 30 21:49:52 crc kubenswrapper[4721]: I0130 21:49:52.854466 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="sg-core" containerID="cri-o://495884936e66f0212054f41ae9c29eec370bd2355362ed832a6beb09366045eb" gracePeriod=30 Jan 30 21:49:53 crc kubenswrapper[4721]: I0130 21:49:53.868170 4721 generic.go:334] "Generic (PLEG): container finished" podID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerID="495884936e66f0212054f41ae9c29eec370bd2355362ed832a6beb09366045eb" exitCode=2 Jan 30 21:49:53 crc kubenswrapper[4721]: I0130 21:49:53.868218 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e7509b-d406-4a52-b5e9-6ba1589d2217","Type":"ContainerDied","Data":"495884936e66f0212054f41ae9c29eec370bd2355362ed832a6beb09366045eb"} Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.881735 4721 generic.go:334] "Generic (PLEG): container finished" podID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerID="0743ad15c2a957b56e9a7cdef78066074f4cf54d60fc58b8a47459a84cb76faf" exitCode=0 Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.881801 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e7509b-d406-4a52-b5e9-6ba1589d2217","Type":"ContainerDied","Data":"0743ad15c2a957b56e9a7cdef78066074f4cf54d60fc58b8a47459a84cb76faf"} Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.883400 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e7509b-d406-4a52-b5e9-6ba1589d2217","Type":"ContainerDied","Data":"3f140070e36a0d36b8d86a392a0b554898926f2130e97551141ac97742c6a127"} Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.883465 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f140070e36a0d36b8d86a392a0b554898926f2130e97551141ac97742c6a127" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.906422 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973284 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9b82\" (UniqueName: \"kubernetes.io/projected/f6e7509b-d406-4a52-b5e9-6ba1589d2217-kube-api-access-v9b82\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973349 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-run-httpd\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973371 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-combined-ca-bundle\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973451 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-config-data\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973506 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-scripts\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973528 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-sg-core-conf-yaml\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-log-httpd\") pod \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\" (UID: \"f6e7509b-d406-4a52-b5e9-6ba1589d2217\") " Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.973925 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.974004 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.974230 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.974246 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e7509b-d406-4a52-b5e9-6ba1589d2217-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.982524 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-scripts" (OuterVolumeSpecName: "scripts") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.982772 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6e7509b-d406-4a52-b5e9-6ba1589d2217-kube-api-access-v9b82" (OuterVolumeSpecName: "kube-api-access-v9b82") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "kube-api-access-v9b82". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:49:54 crc kubenswrapper[4721]: I0130 21:49:54.999523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-config-data" (OuterVolumeSpecName: "config-data") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.001097 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.011446 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6e7509b-d406-4a52-b5e9-6ba1589d2217" (UID: "f6e7509b-d406-4a52-b5e9-6ba1589d2217"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.076771 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.076813 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.076825 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.076847 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9b82\" (UniqueName: \"kubernetes.io/projected/f6e7509b-d406-4a52-b5e9-6ba1589d2217-kube-api-access-v9b82\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.076860 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e7509b-d406-4a52-b5e9-6ba1589d2217-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.890868 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.945319 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.954836 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.990939 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:55 crc kubenswrapper[4721]: E0130 21:49:55.991489 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="ceilometer-notification-agent" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.991506 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="ceilometer-notification-agent" Jan 30 21:49:55 crc kubenswrapper[4721]: E0130 21:49:55.991541 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="sg-core" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.991548 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="sg-core" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.991798 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="sg-core" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.991816 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" containerName="ceilometer-notification-agent" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.993750 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.995962 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:49:55 crc kubenswrapper[4721]: I0130 21:49:55.996266 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.007976 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.083027 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:56 crc kubenswrapper[4721]: E0130 21:49:56.084042 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-mlqdc log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[combined-ca-bundle config-data kube-api-access-mlqdc log-httpd run-httpd scripts sg-core-conf-yaml]: context canceled" pod="openstack/ceilometer-0" podUID="efc1d294-630d-492d-a199-d988c24f5322" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.094844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-config-data\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.097060 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-log-httpd\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.097311 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.097455 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-scripts\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.097586 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-run-httpd\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.097735 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlqdc\" (UniqueName: \"kubernetes.io/projected/efc1d294-630d-492d-a199-d988c24f5322-kube-api-access-mlqdc\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.097932 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.108092 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6e7509b-d406-4a52-b5e9-6ba1589d2217" path="/var/lib/kubelet/pods/f6e7509b-d406-4a52-b5e9-6ba1589d2217/volumes" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.200184 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.200356 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-config-data\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.200396 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-log-httpd\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.200782 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.200916 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-scripts\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.201386 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-log-httpd\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.202098 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-run-httpd\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.202217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlqdc\" (UniqueName: \"kubernetes.io/projected/efc1d294-630d-492d-a199-d988c24f5322-kube-api-access-mlqdc\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.202719 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-run-httpd\") pod \"ceilometer-0\" (UID: 
\"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.206701 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.207489 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.208959 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-config-data\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.222200 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-scripts\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.225052 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlqdc\" (UniqueName: \"kubernetes.io/projected/efc1d294-630d-492d-a199-d988c24f5322-kube-api-access-mlqdc\") pod \"ceilometer-0\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.898373 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:56 crc kubenswrapper[4721]: I0130 21:49:56.911073 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.015790 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-sg-core-conf-yaml\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.015892 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-scripts\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016069 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-log-httpd\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016160 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-config-data\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016198 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-combined-ca-bundle\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016247 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlqdc\" (UniqueName: \"kubernetes.io/projected/efc1d294-630d-492d-a199-d988c24f5322-kube-api-access-mlqdc\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016274 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-run-httpd\") pod \"efc1d294-630d-492d-a199-d988c24f5322\" (UID: \"efc1d294-630d-492d-a199-d988c24f5322\") " Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016407 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.016772 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.017670 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.017686 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efc1d294-630d-492d-a199-d988c24f5322-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.020820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc1d294-630d-492d-a199-d988c24f5322-kube-api-access-mlqdc" (OuterVolumeSpecName: "kube-api-access-mlqdc") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "kube-api-access-mlqdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.021866 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-scripts" (OuterVolumeSpecName: "scripts") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.021921 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-config-data" (OuterVolumeSpecName: "config-data") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.023102 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.024557 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "efc1d294-630d-492d-a199-d988c24f5322" (UID: "efc1d294-630d-492d-a199-d988c24f5322"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.120987 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.121028 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.121046 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlqdc\" (UniqueName: \"kubernetes.io/projected/efc1d294-630d-492d-a199-d988c24f5322-kube-api-access-mlqdc\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.121060 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.121076 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efc1d294-630d-492d-a199-d988c24f5322-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.906498 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:57 crc kubenswrapper[4721]: I0130 21:49:57.983767 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.003940 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.021244 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.023795 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.026863 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.027042 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.030777 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.105432 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc1d294-630d-492d-a199-d988c24f5322" path="/var/lib/kubelet/pods/efc1d294-630d-492d-a199-d988c24f5322/volumes" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138475 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-log-httpd\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138541 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-config-data\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138565 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-scripts\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138625 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-run-httpd\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138652 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138671 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.138774 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf5tw\" (UniqueName: \"kubernetes.io/projected/ec6217ba-45b0-4472-a464-b389ae7b872f-kube-api-access-gf5tw\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240216 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gf5tw\" (UniqueName: \"kubernetes.io/projected/ec6217ba-45b0-4472-a464-b389ae7b872f-kube-api-access-gf5tw\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240334 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-log-httpd\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240358 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-config-data\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240379 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-scripts\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-run-httpd\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240455 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.240477 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.242714 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-log-httpd\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.243389 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-run-httpd\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.257969 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.258202 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.258333 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-scripts\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.259265 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-config-data\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.266120 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf5tw\" (UniqueName: \"kubernetes.io/projected/ec6217ba-45b0-4472-a464-b389ae7b872f-kube-api-access-gf5tw\") pod \"ceilometer-0\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") " pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.351371 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.799675 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.915960 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerStarted","Data":"0f9dd8db472650eb973b106f80a5cac96a80f34088639cf276740b773b992190"} Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.917583 4721 generic.go:334] "Generic (PLEG): container finished" podID="41614e72-cf2f-43c9-a879-f4c76ff277d5" containerID="77b05f5f7abd17e94a5cbc6eb3c197cabe6aff5802e6809e1473bb0d66d5287e" exitCode=0 Jan 30 21:49:58 crc kubenswrapper[4721]: I0130 21:49:58.917615 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnhr" event={"ID":"41614e72-cf2f-43c9-a879-f4c76ff277d5","Type":"ContainerDied","Data":"77b05f5f7abd17e94a5cbc6eb3c197cabe6aff5802e6809e1473bb0d66d5287e"} Jan 30 21:49:59 crc kubenswrapper[4721]: I0130 21:49:59.448285 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:49:59 crc kubenswrapper[4721]: I0130 21:49:59.448391 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:49:59 crc kubenswrapper[4721]: I0130 21:49:59.927375 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerStarted","Data":"54c498dc7adc7dcf0952499beee857eb48d868a2f5536440590dc0c5ce441c9c"} Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.304319 4721 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/placement-db-sync-jjnhr" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.383245 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gkml\" (UniqueName: \"kubernetes.io/projected/41614e72-cf2f-43c9-a879-f4c76ff277d5-kube-api-access-4gkml\") pod \"41614e72-cf2f-43c9-a879-f4c76ff277d5\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.383655 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-config-data\") pod \"41614e72-cf2f-43c9-a879-f4c76ff277d5\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.383828 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-scripts\") pod \"41614e72-cf2f-43c9-a879-f4c76ff277d5\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.383999 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-combined-ca-bundle\") pod \"41614e72-cf2f-43c9-a879-f4c76ff277d5\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.384150 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41614e72-cf2f-43c9-a879-f4c76ff277d5-logs\") pod \"41614e72-cf2f-43c9-a879-f4c76ff277d5\" (UID: \"41614e72-cf2f-43c9-a879-f4c76ff277d5\") " Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.384430 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41614e72-cf2f-43c9-a879-f4c76ff277d5-logs" (OuterVolumeSpecName: "logs") pod "41614e72-cf2f-43c9-a879-f4c76ff277d5" (UID: "41614e72-cf2f-43c9-a879-f4c76ff277d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.384907 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41614e72-cf2f-43c9-a879-f4c76ff277d5-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.397207 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41614e72-cf2f-43c9-a879-f4c76ff277d5-kube-api-access-4gkml" (OuterVolumeSpecName: "kube-api-access-4gkml") pod "41614e72-cf2f-43c9-a879-f4c76ff277d5" (UID: "41614e72-cf2f-43c9-a879-f4c76ff277d5"). InnerVolumeSpecName "kube-api-access-4gkml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.397250 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-scripts" (OuterVolumeSpecName: "scripts") pod "41614e72-cf2f-43c9-a879-f4c76ff277d5" (UID: "41614e72-cf2f-43c9-a879-f4c76ff277d5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.410672 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41614e72-cf2f-43c9-a879-f4c76ff277d5" (UID: "41614e72-cf2f-43c9-a879-f4c76ff277d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.411670 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-config-data" (OuterVolumeSpecName: "config-data") pod "41614e72-cf2f-43c9-a879-f4c76ff277d5" (UID: "41614e72-cf2f-43c9-a879-f4c76ff277d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.486979 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gkml\" (UniqueName: \"kubernetes.io/projected/41614e72-cf2f-43c9-a879-f4c76ff277d5-kube-api-access-4gkml\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.487027 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.487043 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.487057 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41614e72-cf2f-43c9-a879-f4c76ff277d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.939094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnhr" event={"ID":"41614e72-cf2f-43c9-a879-f4c76ff277d5","Type":"ContainerDied","Data":"7adfe4bc614600a0acf524f822eacdf170c431970119cc76426bd1216b3e22bb"} Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.939626 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7adfe4bc614600a0acf524f822eacdf170c431970119cc76426bd1216b3e22bb" Jan 30 21:50:00 crc kubenswrapper[4721]: I0130 21:50:00.939376 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jjnhr" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.036324 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5c759f49d6-k22ln"] Jan 30 21:50:01 crc kubenswrapper[4721]: E0130 21:50:01.036831 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41614e72-cf2f-43c9-a879-f4c76ff277d5" containerName="placement-db-sync" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.036855 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="41614e72-cf2f-43c9-a879-f4c76ff277d5" containerName="placement-db-sync" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.037078 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="41614e72-cf2f-43c9-a879-f4c76ff277d5" containerName="placement-db-sync" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.038409 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.043997 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.044215 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.044039 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-h627x" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.044372 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.044061 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.056797 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5c759f49d6-k22ln"] Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.201901 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9def88bd-3017-4ea9-8a12-b895aeb4b28f-logs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.202038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-internal-tls-certs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.202078 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-public-tls-certs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.202122 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-config-data\") pod \"placement-5c759f49d6-k22ln\" (UID: 
\"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.202175 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-combined-ca-bundle\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.202234 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79jj8\" (UniqueName: \"kubernetes.io/projected/9def88bd-3017-4ea9-8a12-b895aeb4b28f-kube-api-access-79jj8\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.202266 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-scripts\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304025 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-combined-ca-bundle\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79jj8\" (UniqueName: \"kubernetes.io/projected/9def88bd-3017-4ea9-8a12-b895aeb4b28f-kube-api-access-79jj8\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304163 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-scripts\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304255 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9def88bd-3017-4ea9-8a12-b895aeb4b28f-logs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304339 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-internal-tls-certs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304372 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-public-tls-certs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") 
" pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.304410 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-config-data\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.306841 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9def88bd-3017-4ea9-8a12-b895aeb4b28f-logs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.316363 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-internal-tls-certs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.316923 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-combined-ca-bundle\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.317974 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-scripts\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.318678 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-config-data\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.318963 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9def88bd-3017-4ea9-8a12-b895aeb4b28f-public-tls-certs\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.323480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79jj8\" (UniqueName: \"kubernetes.io/projected/9def88bd-3017-4ea9-8a12-b895aeb4b28f-kube-api-access-79jj8\") pod \"placement-5c759f49d6-k22ln\" (UID: \"9def88bd-3017-4ea9-8a12-b895aeb4b28f\") " pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.357923 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:01 crc kubenswrapper[4721]: I0130 21:50:01.950866 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerStarted","Data":"05846b20f1a784244e1256c20804886049c326803873082b572b948348aede3c"} Jan 30 21:50:02 crc kubenswrapper[4721]: I0130 21:50:02.076328 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5c759f49d6-k22ln"] Jan 30 21:50:02 crc kubenswrapper[4721]: W0130 21:50:02.079907 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9def88bd_3017_4ea9_8a12_b895aeb4b28f.slice/crio-b94a4e832cd8933e8c914b60aa0c0c2d957a7667bef54366c570ea6d69afce01 WatchSource:0}: Error finding container b94a4e832cd8933e8c914b60aa0c0c2d957a7667bef54366c570ea6d69afce01: Status 404 returned error can't find the container with id b94a4e832cd8933e8c914b60aa0c0c2d957a7667bef54366c570ea6d69afce01 Jan 30 21:50:02 crc kubenswrapper[4721]: I0130 21:50:02.960561 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c759f49d6-k22ln" event={"ID":"9def88bd-3017-4ea9-8a12-b895aeb4b28f","Type":"ContainerStarted","Data":"668beb856499f00d8fede6c16ec691872a2843fce7b50091f4a39a47ecbf1c69"} Jan 30 21:50:02 crc kubenswrapper[4721]: I0130 21:50:02.960839 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c759f49d6-k22ln" event={"ID":"9def88bd-3017-4ea9-8a12-b895aeb4b28f","Type":"ContainerStarted","Data":"b94a4e832cd8933e8c914b60aa0c0c2d957a7667bef54366c570ea6d69afce01"} Jan 30 21:50:02 crc kubenswrapper[4721]: I0130 21:50:02.962923 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerStarted","Data":"9fc1b258d9893762813bf34eff9e539ff558bc5c12e6d75eec578cd3036607c3"} Jan 30 21:50:03 crc kubenswrapper[4721]: I0130 21:50:03.977628 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c759f49d6-k22ln" event={"ID":"9def88bd-3017-4ea9-8a12-b895aeb4b28f","Type":"ContainerStarted","Data":"62a9779da7b98cf9ef7d4b14d3975287c7fb3a1013fbc44872fb8babd48173d9"} Jan 30 21:50:03 crc kubenswrapper[4721]: I0130 21:50:03.978065 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:03 crc kubenswrapper[4721]: I0130 21:50:03.978083 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5c759f49d6-k22ln" Jan 30 21:50:04 crc kubenswrapper[4721]: I0130 21:50:04.014044 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5c759f49d6-k22ln" podStartSLOduration=3.01402312 podStartE2EDuration="3.01402312s" podCreationTimestamp="2026-01-30 21:50:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:04.007020603 +0000 UTC m=+1992.798921849" watchObservedRunningTime="2026-01-30 21:50:04.01402312 +0000 UTC m=+1992.805924366" Jan 30 21:50:06 crc kubenswrapper[4721]: I0130 21:50:05.999552 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerStarted","Data":"2794980561bb3f8a50f27e22f17663fafa3ec6d2d78d4d797ccb92b725ff23b3"} Jan 30 21:50:06 crc 
Jan 30 21:50:06 crc kubenswrapper[4721]: I0130 21:50:06.001344 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 30 21:50:06 crc kubenswrapper[4721]: I0130 21:50:06.003087 4721 generic.go:334] "Generic (PLEG): container finished" podID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" containerID="b412d308c94e402ce85900f21e5966c4b248167bddfe3a2f59825b7513dd9036" exitCode=0
Jan 30 21:50:06 crc kubenswrapper[4721]: I0130 21:50:06.003113 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gr825" event={"ID":"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76","Type":"ContainerDied","Data":"b412d308c94e402ce85900f21e5966c4b248167bddfe3a2f59825b7513dd9036"}
Jan 30 21:50:06 crc kubenswrapper[4721]: I0130 21:50:06.027988 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.750752896 podStartE2EDuration="9.027972102s" podCreationTimestamp="2026-01-30 21:49:57 +0000 UTC" firstStartedPulling="2026-01-30 21:49:58.800743657 +0000 UTC m=+1987.592644903" lastFinishedPulling="2026-01-30 21:50:05.077962863 +0000 UTC m=+1993.869864109" observedRunningTime="2026-01-30 21:50:06.024806454 +0000 UTC m=+1994.816707700" watchObservedRunningTime="2026-01-30 21:50:06.027972102 +0000 UTC m=+1994.819873348"
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.404078 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-gr825"
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.543678 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-combined-ca-bundle\") pod \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") "
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.543929 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-db-sync-config-data\") pod \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") "
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.544016 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nktdd\" (UniqueName: \"kubernetes.io/projected/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-kube-api-access-nktdd\") pod \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\" (UID: \"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76\") "
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.548682 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" (UID: "bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.572717 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-kube-api-access-nktdd" (OuterVolumeSpecName: "kube-api-access-nktdd") pod "bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" (UID: "bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76"). InnerVolumeSpecName "kube-api-access-nktdd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.577004 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" (UID: "bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.646178 4721 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.646429 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nktdd\" (UniqueName: \"kubernetes.io/projected/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-kube-api-access-nktdd\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:07 crc kubenswrapper[4721]: I0130 21:50:07.646441 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.031051 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gr825" event={"ID":"bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76","Type":"ContainerDied","Data":"3f899a57976ffed630242501b2bb5a94bbd4cc330cc5701e799f00370611b478"}
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.031372 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f899a57976ffed630242501b2bb5a94bbd4cc330cc5701e799f00370611b478"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.031215 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-gr825"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.501953 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6456f55dbc-h7p5b"]
Jan 30 21:50:08 crc kubenswrapper[4721]: E0130 21:50:08.502463 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" containerName="barbican-db-sync"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.502480 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" containerName="barbican-db-sync"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.502707 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" containerName="barbican-db-sync"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.504138 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6456f55dbc-h7p5b"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.510645 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.511365 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.512738 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2qsxv"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.524646 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6456f55dbc-h7p5b"]
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.558578 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6db69d5bd6-grvs7"]
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.560200 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.564623 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.597374 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6db69d5bd6-grvs7"]
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670321 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-combined-ca-bundle\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670389 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/925d81d0-2e07-4a27-a8d1-7edff62fe070-logs\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670434 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7457bf9f-e5dd-47af-9346-898a62273a3a-logs\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670476 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-config-data\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670499 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-config-data\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670527 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-config-data-custom\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670549 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-config-data-custom\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670585 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlmxx\" (UniqueName: \"kubernetes.io/projected/7457bf9f-e5dd-47af-9346-898a62273a3a-kube-api-access-jlmxx\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670610 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-combined-ca-bundle\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.670633 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl2zq\" (UniqueName: \"kubernetes.io/projected/925d81d0-2e07-4a27-a8d1-7edff62fe070-kube-api-access-bl2zq\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7"
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.689358 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-zcp2h"]
Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.690973 4721 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.718613 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-zcp2h"] Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.775984 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-config-data\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.777583 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-config-data\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.777674 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-config-data-custom\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.777710 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-config-data-custom\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.777790 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlmxx\" (UniqueName: \"kubernetes.io/projected/7457bf9f-e5dd-47af-9346-898a62273a3a-kube-api-access-jlmxx\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.777865 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-combined-ca-bundle\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.777924 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl2zq\" (UniqueName: \"kubernetes.io/projected/925d81d0-2e07-4a27-a8d1-7edff62fe070-kube-api-access-bl2zq\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.778583 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-combined-ca-bundle\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc 
kubenswrapper[4721]: I0130 21:50:08.778712 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/925d81d0-2e07-4a27-a8d1-7edff62fe070-logs\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.778831 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7457bf9f-e5dd-47af-9346-898a62273a3a-logs\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.779852 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7457bf9f-e5dd-47af-9346-898a62273a3a-logs\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.783190 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/925d81d0-2e07-4a27-a8d1-7edff62fe070-logs\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.784499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-config-data\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.787158 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-config-data\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.792365 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-c6f84b47b-7hn7s"] Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.794409 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.794802 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-combined-ca-bundle\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.795375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-config-data-custom\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.795711 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/925d81d0-2e07-4a27-a8d1-7edff62fe070-config-data-custom\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.798737 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.799095 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl2zq\" (UniqueName: \"kubernetes.io/projected/925d81d0-2e07-4a27-a8d1-7edff62fe070-kube-api-access-bl2zq\") pod \"barbican-keystone-listener-6db69d5bd6-grvs7\" (UID: \"925d81d0-2e07-4a27-a8d1-7edff62fe070\") " pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.801279 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7457bf9f-e5dd-47af-9346-898a62273a3a-combined-ca-bundle\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.806424 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c6f84b47b-7hn7s"] Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.817940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlmxx\" (UniqueName: \"kubernetes.io/projected/7457bf9f-e5dd-47af-9346-898a62273a3a-kube-api-access-jlmxx\") pod \"barbican-worker-6456f55dbc-h7p5b\" (UID: \"7457bf9f-e5dd-47af-9346-898a62273a3a\") " pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.849810 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6456f55dbc-h7p5b" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.880897 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.880973 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.881028 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.881167 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-config\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.881216 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2h49\" (UniqueName: \"kubernetes.io/projected/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-kube-api-access-w2h49\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.881473 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.887902 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983157 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983213 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-logs\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983243 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983272 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983326 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk7vs\" (UniqueName: \"kubernetes.io/projected/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-kube-api-access-kk7vs\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983356 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-combined-ca-bundle\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983373 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983417 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-config\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983442 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2h49\" (UniqueName: \"kubernetes.io/projected/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-kube-api-access-w2h49\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: 
\"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983465 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.983497 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data-custom\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.984368 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.984459 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.984476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.984885 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-config\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:08 crc kubenswrapper[4721]: I0130 21:50:08.985008 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.047282 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2h49\" (UniqueName: \"kubernetes.io/projected/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-kube-api-access-w2h49\") pod \"dnsmasq-dns-586bdc5f9-zcp2h\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.100721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-logs\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc 
kubenswrapper[4721]: I0130 21:50:09.101269 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk7vs\" (UniqueName: \"kubernetes.io/projected/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-kube-api-access-kk7vs\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.101405 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-combined-ca-bundle\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.101450 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.101702 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data-custom\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.105197 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-logs\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.110103 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.118050 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-combined-ca-bundle\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.129043 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data-custom\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.145505 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk7vs\" (UniqueName: \"kubernetes.io/projected/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-kube-api-access-kk7vs\") pod \"barbican-api-c6f84b47b-7hn7s\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") " pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.325888 4721 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.379524 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.507038 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6456f55dbc-h7p5b"] Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.686910 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6db69d5bd6-grvs7"] Jan 30 21:50:09 crc kubenswrapper[4721]: W0130 21:50:09.689614 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod925d81d0_2e07_4a27_a8d1_7edff62fe070.slice/crio-687f8a907265a906fb14dffcbe7a4e36b2813f1bd9cd3a045d4d626faee1d33c WatchSource:0}: Error finding container 687f8a907265a906fb14dffcbe7a4e36b2813f1bd9cd3a045d4d626faee1d33c: Status 404 returned error can't find the container with id 687f8a907265a906fb14dffcbe7a4e36b2813f1bd9cd3a045d4d626faee1d33c Jan 30 21:50:09 crc kubenswrapper[4721]: I0130 21:50:09.833447 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c6f84b47b-7hn7s"] Jan 30 21:50:10 crc kubenswrapper[4721]: I0130 21:50:10.112185 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6456f55dbc-h7p5b" event={"ID":"7457bf9f-e5dd-47af-9346-898a62273a3a","Type":"ContainerStarted","Data":"6d8b29b7d940fcb71ae24941a6e31c5edb96711eb975ecf18fc2cc2a4d3d7578"} Jan 30 21:50:10 crc kubenswrapper[4721]: I0130 21:50:10.114465 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6f84b47b-7hn7s" event={"ID":"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469","Type":"ContainerStarted","Data":"fec2543a82c3557234970a7d37ea09f63c67db297b6a9e56f0a630799b5e6841"} Jan 30 21:50:10 crc kubenswrapper[4721]: I0130 21:50:10.116163 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" event={"ID":"925d81d0-2e07-4a27-a8d1-7edff62fe070","Type":"ContainerStarted","Data":"687f8a907265a906fb14dffcbe7a4e36b2813f1bd9cd3a045d4d626faee1d33c"} Jan 30 21:50:10 crc kubenswrapper[4721]: I0130 21:50:10.128915 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-zcp2h"] Jan 30 21:50:10 crc kubenswrapper[4721]: W0130 21:50:10.143164 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8aeedebb_2a13_44dd_b9a2_add4ee0b94bb.slice/crio-415b7988b793b8d7deb3aa97c6959bab027e0fdce13e1e2aa5083cc5794e38c0 WatchSource:0}: Error finding container 415b7988b793b8d7deb3aa97c6959bab027e0fdce13e1e2aa5083cc5794e38c0: Status 404 returned error can't find the container with id 415b7988b793b8d7deb3aa97c6959bab027e0fdce13e1e2aa5083cc5794e38c0 Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.139226 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-54b7957dff-42xvx" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.141556 4721 generic.go:334] "Generic (PLEG): container finished" podID="fbe7c436-51b8-4985-933d-fde2a16767bd" containerID="569ac664f271273d28adff4b507b3b3f20c4031671b43bf4a29877af6b05c6b5" exitCode=0 Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.141619 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cloudkitty-db-sync-b98rm" event={"ID":"fbe7c436-51b8-4985-933d-fde2a16767bd","Type":"ContainerDied","Data":"569ac664f271273d28adff4b507b3b3f20c4031671b43bf4a29877af6b05c6b5"} Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.148391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6f84b47b-7hn7s" event={"ID":"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469","Type":"ContainerStarted","Data":"38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4"} Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.148432 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6f84b47b-7hn7s" event={"ID":"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469","Type":"ContainerStarted","Data":"b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712"} Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.148452 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.148476 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c6f84b47b-7hn7s" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.151350 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerID="916ec534266fce7926d19495d65c66267422e59196791f93aa56cf68c21a45f2" exitCode=0 Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.151450 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" event={"ID":"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb","Type":"ContainerDied","Data":"916ec534266fce7926d19495d65c66267422e59196791f93aa56cf68c21a45f2"} Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.151493 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" event={"ID":"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb","Type":"ContainerStarted","Data":"415b7988b793b8d7deb3aa97c6959bab027e0fdce13e1e2aa5083cc5794e38c0"} Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.155160 4721 generic.go:334] "Generic (PLEG): container finished" podID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" containerID="ca7aed5bf67815d1ae51471ebe7450ee976b5e2dfd0f0c7ef3e3b8ea9159b7e8" exitCode=0 Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.155216 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-k4phl" event={"ID":"710ef32f-2c64-4aea-a0d4-ea18b41e4f10","Type":"ContainerDied","Data":"ca7aed5bf67815d1ae51471ebe7450ee976b5e2dfd0f0c7ef3e3b8ea9159b7e8"} Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.248760 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-c6f84b47b-7hn7s" podStartSLOduration=3.248739039 podStartE2EDuration="3.248739039s" podCreationTimestamp="2026-01-30 21:50:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:11.228852492 +0000 UTC m=+2000.020753748" watchObservedRunningTime="2026-01-30 21:50:11.248739039 +0000 UTC m=+2000.040640285" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.669023 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-68b6df574b-x89tq"] Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.671173 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.673182 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.673244 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.685349 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68b6df574b-x89tq"] Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788228 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-internal-tls-certs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788320 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28t6q\" (UniqueName: \"kubernetes.io/projected/e418bacc-47a2-45cd-9bb3-35e42563c482-kube-api-access-28t6q\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788405 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418bacc-47a2-45cd-9bb3-35e42563c482-logs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788499 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-public-tls-certs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788553 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-config-data-custom\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788587 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-config-data\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.788648 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-combined-ca-bundle\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890245 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-config-data-custom\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890622 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-config-data\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890667 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-combined-ca-bundle\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890692 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-internal-tls-certs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890731 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28t6q\" (UniqueName: \"kubernetes.io/projected/e418bacc-47a2-45cd-9bb3-35e42563c482-kube-api-access-28t6q\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418bacc-47a2-45cd-9bb3-35e42563c482-logs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.890837 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-public-tls-certs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.891192 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418bacc-47a2-45cd-9bb3-35e42563c482-logs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.895983 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-internal-tls-certs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.897128 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-config-data-custom\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.898537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-combined-ca-bundle\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.900792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-config-data\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.901937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e418bacc-47a2-45cd-9bb3-35e42563c482-public-tls-certs\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:11 crc kubenswrapper[4721]: I0130 21:50:11.923984 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28t6q\" (UniqueName: \"kubernetes.io/projected/e418bacc-47a2-45cd-9bb3-35e42563c482-kube-api-access-28t6q\") pod \"barbican-api-68b6df574b-x89tq\" (UID: \"e418bacc-47a2-45cd-9bb3-35e42563c482\") " pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:12 crc kubenswrapper[4721]: I0130 21:50:12.028933 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.128577 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-b98rm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.147847 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-k4phl" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.230153 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-b98rm" event={"ID":"fbe7c436-51b8-4985-933d-fde2a16767bd","Type":"ContainerDied","Data":"0668ba079f03cbff3281a2dd8184f095c960010b27be3e41543f871d66316e37"} Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.230195 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0668ba079f03cbff3281a2dd8184f095c960010b27be3e41543f871d66316e37" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.230270 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-b98rm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.243268 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-scripts\") pod \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.243370 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-etc-machine-id\") pod \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244315 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-combined-ca-bundle\") pod \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244405 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v94d8\" (UniqueName: \"kubernetes.io/projected/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-kube-api-access-v94d8\") pod \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244435 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-config-data\") pod \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244424 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "710ef32f-2c64-4aea-a0d4-ea18b41e4f10" (UID: "710ef32f-2c64-4aea-a0d4-ea18b41e4f10"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244672 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-db-sync-config-data\") pod \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\" (UID: \"710ef32f-2c64-4aea-a0d4-ea18b41e4f10\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244765 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-config-data\") pod \"fbe7c436-51b8-4985-933d-fde2a16767bd\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244808 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-combined-ca-bundle\") pod \"fbe7c436-51b8-4985-933d-fde2a16767bd\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244849 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-scripts\") pod \"fbe7c436-51b8-4985-933d-fde2a16767bd\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244893 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxqlc\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-kube-api-access-gxqlc\") pod \"fbe7c436-51b8-4985-933d-fde2a16767bd\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.244945 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-certs\") pod \"fbe7c436-51b8-4985-933d-fde2a16767bd\" (UID: \"fbe7c436-51b8-4985-933d-fde2a16767bd\") " Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.245809 4721 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.248742 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-k4phl" event={"ID":"710ef32f-2c64-4aea-a0d4-ea18b41e4f10","Type":"ContainerDied","Data":"ec124e0bd3fd71f6373da34adb0bad5dde0956133e764f252675e76a5ad99850"} Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.248797 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec124e0bd3fd71f6373da34adb0bad5dde0956133e764f252675e76a5ad99850" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.248863 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-k4phl" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.276009 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-kube-api-access-v94d8" (OuterVolumeSpecName: "kube-api-access-v94d8") pod "710ef32f-2c64-4aea-a0d4-ea18b41e4f10" (UID: "710ef32f-2c64-4aea-a0d4-ea18b41e4f10"). InnerVolumeSpecName "kube-api-access-v94d8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.276341 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-certs" (OuterVolumeSpecName: "certs") pod "fbe7c436-51b8-4985-933d-fde2a16767bd" (UID: "fbe7c436-51b8-4985-933d-fde2a16767bd"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.280858 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-scripts" (OuterVolumeSpecName: "scripts") pod "710ef32f-2c64-4aea-a0d4-ea18b41e4f10" (UID: "710ef32f-2c64-4aea-a0d4-ea18b41e4f10"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.283740 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-kube-api-access-gxqlc" (OuterVolumeSpecName: "kube-api-access-gxqlc") pod "fbe7c436-51b8-4985-933d-fde2a16767bd" (UID: "fbe7c436-51b8-4985-933d-fde2a16767bd"). InnerVolumeSpecName "kube-api-access-gxqlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.288627 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-scripts" (OuterVolumeSpecName: "scripts") pod "fbe7c436-51b8-4985-933d-fde2a16767bd" (UID: "fbe7c436-51b8-4985-933d-fde2a16767bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.289267 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "710ef32f-2c64-4aea-a0d4-ea18b41e4f10" (UID: "710ef32f-2c64-4aea-a0d4-ea18b41e4f10"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.338468 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-config-data" (OuterVolumeSpecName: "config-data") pod "fbe7c436-51b8-4985-933d-fde2a16767bd" (UID: "fbe7c436-51b8-4985-933d-fde2a16767bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.340322 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbe7c436-51b8-4985-933d-fde2a16767bd" (UID: "fbe7c436-51b8-4985-933d-fde2a16767bd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348181 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348210 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxqlc\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-kube-api-access-gxqlc\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348222 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/fbe7c436-51b8-4985-933d-fde2a16767bd-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348230 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348240 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v94d8\" (UniqueName: \"kubernetes.io/projected/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-kube-api-access-v94d8\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348249 4721 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348259 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.348288 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe7c436-51b8-4985-933d-fde2a16767bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.358670 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "710ef32f-2c64-4aea-a0d4-ea18b41e4f10" (UID: "710ef32f-2c64-4aea-a0d4-ea18b41e4f10"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.376161 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-qmlfm"] Jan 30 21:50:13 crc kubenswrapper[4721]: E0130 21:50:13.376713 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" containerName="cinder-db-sync" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.376738 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" containerName="cinder-db-sync" Jan 30 21:50:13 crc kubenswrapper[4721]: E0130 21:50:13.376777 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe7c436-51b8-4985-933d-fde2a16767bd" containerName="cloudkitty-db-sync" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.376785 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe7c436-51b8-4985-933d-fde2a16767bd" containerName="cloudkitty-db-sync" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.377067 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" containerName="cinder-db-sync" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.377099 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe7c436-51b8-4985-933d-fde2a16767bd" containerName="cloudkitty-db-sync" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.380017 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.389902 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68b6df574b-x89tq"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.405365 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-qmlfm"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.420468 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-config-data" (OuterVolumeSpecName: "config-data") pod "710ef32f-2c64-4aea-a0d4-ea18b41e4f10" (UID: "710ef32f-2c64-4aea-a0d4-ea18b41e4f10"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453315 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-config-data\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453425 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-certs\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453463 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwgzs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-kube-api-access-rwgzs\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453514 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-scripts\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453573 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-combined-ca-bundle\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453669 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.453682 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/710ef32f-2c64-4aea-a0d4-ea18b41e4f10-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.558525 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-config-data\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.558634 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-certs\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.558673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwgzs\" (UniqueName: 
\"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-kube-api-access-rwgzs\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.558710 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-scripts\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.558744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-combined-ca-bundle\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.559120 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.564423 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.569857 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.570888 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-combined-ca-bundle\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.580853 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.581108 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-78jsp" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.581372 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.588155 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-certs\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.595000 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-scripts\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.595461 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.613891 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-config-data\") pod \"cloudkitty-storageinit-qmlfm\" (UID: 
\"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.624891 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwgzs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-kube-api-access-rwgzs\") pod \"cloudkitty-storageinit-qmlfm\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") " pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.648719 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-zcp2h"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.678909 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9w5c\" (UniqueName: \"kubernetes.io/projected/c56f8a6d-9d54-49cd-90e1-2f1003a54794-kube-api-access-s9w5c\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.679071 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c56f8a6d-9d54-49cd-90e1-2f1003a54794-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.679101 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-scripts\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.679144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.679262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.679289 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.708832 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-p8n56"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.710632 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.765363 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-p8n56"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.784338 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9w5c\" (UniqueName: \"kubernetes.io/projected/c56f8a6d-9d54-49cd-90e1-2f1003a54794-kube-api-access-s9w5c\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.784496 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c56f8a6d-9d54-49cd-90e1-2f1003a54794-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.784524 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-scripts\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.784576 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.784707 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.784732 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.788043 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c56f8a6d-9d54-49cd-90e1-2f1003a54794-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.791354 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.806480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc 
kubenswrapper[4721]: I0130 21:50:13.807287 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-scripts\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.807980 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.820308 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9w5c\" (UniqueName: \"kubernetes.io/projected/c56f8a6d-9d54-49cd-90e1-2f1003a54794-kube-api-access-s9w5c\") pod \"cinder-scheduler-0\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.820767 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-qmlfm" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.887020 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nwxm\" (UniqueName: \"kubernetes.io/projected/37728bd9-18e7-4391-a9ff-87c282bb1c72-kube-api-access-7nwxm\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.887099 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.887220 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.887347 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.887403 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.887522 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-config\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.920456 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.981590 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.983235 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.987083 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.993183 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.993395 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-config\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.993548 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nwxm\" (UniqueName: \"kubernetes.io/projected/37728bd9-18e7-4391-a9ff-87c282bb1c72-kube-api-access-7nwxm\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.993680 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.993859 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.994019 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.994227 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.994971 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.995646 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.996261 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.996555 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:13 crc kubenswrapper[4721]: I0130 21:50:13.997006 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-config\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.050229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nwxm\" (UniqueName: \"kubernetes.io/projected/37728bd9-18e7-4391-a9ff-87c282bb1c72-kube-api-access-7nwxm\") pod \"dnsmasq-dns-795f4db4bc-p8n56\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.098502 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5r7w\" (UniqueName: \"kubernetes.io/projected/6174475a-ddff-44cc-bd53-014017041b14-kube-api-access-l5r7w\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.098963 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6174475a-ddff-44cc-bd53-014017041b14-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.098999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-scripts\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.099042 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6174475a-ddff-44cc-bd53-014017041b14-logs\") pod \"cinder-api-0\" (UID: 
\"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.099094 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data-custom\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.099155 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.099198 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.198864 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.200571 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5r7w\" (UniqueName: \"kubernetes.io/projected/6174475a-ddff-44cc-bd53-014017041b14-kube-api-access-l5r7w\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.200671 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6174475a-ddff-44cc-bd53-014017041b14-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.200696 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-scripts\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.200730 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6174475a-ddff-44cc-bd53-014017041b14-logs\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.200765 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data-custom\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.200805 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 
21:50:14.200830 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.201459 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6174475a-ddff-44cc-bd53-014017041b14-logs\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.201815 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6174475a-ddff-44cc-bd53-014017041b14-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.213977 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.216932 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-scripts\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.217976 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.290687 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data-custom\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.335810 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5r7w\" (UniqueName: \"kubernetes.io/projected/6174475a-ddff-44cc-bd53-014017041b14-kube-api-access-l5r7w\") pod \"cinder-api-0\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.400736 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" event={"ID":"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb","Type":"ContainerStarted","Data":"0a30eba24af8989557be3f48addb1982eac1ad717d24eb1d39b16ac689a3f9c7"} Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.401484 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.408144 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b6df574b-x89tq" event={"ID":"e418bacc-47a2-45cd-9bb3-35e42563c482","Type":"ContainerStarted","Data":"652527d704e9af3da85b0af9e3beb2f30d15497a8de15dfa72f7eddec5585a9c"} Jan 30 
21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.428480 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" event={"ID":"925d81d0-2e07-4a27-a8d1-7edff62fe070","Type":"ContainerStarted","Data":"80320dc1ac00edd8e7e17c15db1ac29b49d4919a50d7b7d659b7b25f6e14c701"} Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.463548 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6456f55dbc-h7p5b" event={"ID":"7457bf9f-e5dd-47af-9346-898a62273a3a","Type":"ContainerStarted","Data":"9d6d09ef7751fd3a38b59065c57a5cf672379b2ff57a9533d70ea69851c018f1"} Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.463601 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6456f55dbc-h7p5b" event={"ID":"7457bf9f-e5dd-47af-9346-898a62273a3a","Type":"ContainerStarted","Data":"9eaf869736a07ca28c7e2b213df4964b57a02690b64b44a078c1af3218eab093"} Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.481434 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" podStartSLOduration=6.481407444 podStartE2EDuration="6.481407444s" podCreationTimestamp="2026-01-30 21:50:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:14.442131046 +0000 UTC m=+2003.234032302" watchObservedRunningTime="2026-01-30 21:50:14.481407444 +0000 UTC m=+2003.273308690" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.499587 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6456f55dbc-h7p5b" podStartSLOduration=3.240840954 podStartE2EDuration="6.499565167s" podCreationTimestamp="2026-01-30 21:50:08 +0000 UTC" firstStartedPulling="2026-01-30 21:50:09.566452135 +0000 UTC m=+1998.358353381" lastFinishedPulling="2026-01-30 21:50:12.825176348 +0000 UTC m=+2001.617077594" observedRunningTime="2026-01-30 21:50:14.488487273 +0000 UTC m=+2003.280388519" watchObservedRunningTime="2026-01-30 21:50:14.499565167 +0000 UTC m=+2003.291466403" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.515693 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.766579 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-qmlfm"] Jan 30 21:50:14 crc kubenswrapper[4721]: I0130 21:50:14.969738 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.185762 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.216484 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-p8n56"] Jan 30 21:50:15 crc kubenswrapper[4721]: W0130 21:50:15.292444 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6174475a_ddff_44cc_bd53_014017041b14.slice/crio-262387a334ef5c022bc787c3ba1b631649d3f50d7f5862c4b82e1ec8955c2375 WatchSource:0}: Error finding container 262387a334ef5c022bc787c3ba1b631649d3f50d7f5862c4b82e1ec8955c2375: Status 404 returned error can't find the container with id 262387a334ef5c022bc787c3ba1b631649d3f50d7f5862c4b82e1ec8955c2375 Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.510702 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6174475a-ddff-44cc-bd53-014017041b14","Type":"ContainerStarted","Data":"262387a334ef5c022bc787c3ba1b631649d3f50d7f5862c4b82e1ec8955c2375"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.516114 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.519453 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-qmlfm" event={"ID":"453b837a-3b2e-4993-90e4-ea72ad1f6b9e","Type":"ContainerStarted","Data":"d7307dcb02312290efaedd3ca8fe92c3e1a45e1d4b8c17f0ba4e2d114a3c5b66"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.519499 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-qmlfm" event={"ID":"453b837a-3b2e-4993-90e4-ea72ad1f6b9e","Type":"ContainerStarted","Data":"342e2ec24c7bbdcefe8b1209d6d2fadefa5adfb5649aca51f38361716ba472a8"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.534115 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c56f8a6d-9d54-49cd-90e1-2f1003a54794","Type":"ContainerStarted","Data":"21a4e9b2c932436df40fc25a6cf841ac1756ad1df9e83e804ee52bb3f83f86e6"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.547958 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b6df574b-x89tq" event={"ID":"e418bacc-47a2-45cd-9bb3-35e42563c482","Type":"ContainerStarted","Data":"3d5f2ddd619a835cc2c3f535319cb6df3e3affe3814f465c129a7eeea9135e43"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.548044 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b6df574b-x89tq" event={"ID":"e418bacc-47a2-45cd-9bb3-35e42563c482","Type":"ContainerStarted","Data":"763b93b8bb07abea8a335c97a2fb0362de1b45bec2dc4a92ccccb9dac3914a22"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.549442 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.549476 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.560598 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-qmlfm" podStartSLOduration=2.560580289 podStartE2EDuration="2.560580289s" podCreationTimestamp="2026-01-30 21:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:15.547741431 +0000 UTC m=+2004.339642687" watchObservedRunningTime="2026-01-30 21:50:15.560580289 +0000 UTC m=+2004.352481535" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.577664 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" event={"ID":"925d81d0-2e07-4a27-a8d1-7edff62fe070","Type":"ContainerStarted","Data":"6fc2bbdccb49d8711168553100d07f75643a71460ebafa07175e0a1d70d2f293"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.596451 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" event={"ID":"37728bd9-18e7-4391-a9ff-87c282bb1c72","Type":"ContainerStarted","Data":"879f71471751f9fb5a6207e0730c95ee7a4f631d6d2fc21437b9ae5010b5cffc"} Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.596672 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerName="dnsmasq-dns" containerID="cri-o://0a30eba24af8989557be3f48addb1982eac1ad717d24eb1d39b16ac689a3f9c7" gracePeriod=10 Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.634820 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-68b6df574b-x89tq" podStartSLOduration=4.634799121 podStartE2EDuration="4.634799121s" podCreationTimestamp="2026-01-30 21:50:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:15.577443942 +0000 UTC m=+2004.369345198" watchObservedRunningTime="2026-01-30 21:50:15.634799121 +0000 UTC m=+2004.426700367" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.656273 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6db69d5bd6-grvs7" podStartSLOduration=4.564292747 podStartE2EDuration="7.656252527s" podCreationTimestamp="2026-01-30 21:50:08 +0000 UTC" firstStartedPulling="2026-01-30 21:50:09.731469854 +0000 UTC m=+1998.523371100" lastFinishedPulling="2026-01-30 21:50:12.823429624 +0000 UTC m=+2001.615330880" observedRunningTime="2026-01-30 21:50:15.60927463 +0000 UTC m=+2004.401175876" watchObservedRunningTime="2026-01-30 21:50:15.656252527 +0000 UTC m=+2004.448153773" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.960974 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.962271 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.966962 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.967020 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-5mncs" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.967173 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 30 21:50:15 crc kubenswrapper[4721]: I0130 21:50:15.979623 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.052552 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-659s6\" (UniqueName: \"kubernetes.io/projected/fc69e094-e84a-44d5-9a2c-726bac11b1c2-kube-api-access-659s6\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.052900 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fc69e094-e84a-44d5-9a2c-726bac11b1c2-openstack-config-secret\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.052947 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc69e094-e84a-44d5-9a2c-726bac11b1c2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.052983 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fc69e094-e84a-44d5-9a2c-726bac11b1c2-openstack-config\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.155369 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-659s6\" (UniqueName: \"kubernetes.io/projected/fc69e094-e84a-44d5-9a2c-726bac11b1c2-kube-api-access-659s6\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.155439 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fc69e094-e84a-44d5-9a2c-726bac11b1c2-openstack-config-secret\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.155496 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc69e094-e84a-44d5-9a2c-726bac11b1c2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.155540 4721 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fc69e094-e84a-44d5-9a2c-726bac11b1c2-openstack-config\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.156965 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/fc69e094-e84a-44d5-9a2c-726bac11b1c2-openstack-config\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.166928 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc69e094-e84a-44d5-9a2c-726bac11b1c2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.209190 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/fc69e094-e84a-44d5-9a2c-726bac11b1c2-openstack-config-secret\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.210974 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-659s6\" (UniqueName: \"kubernetes.io/projected/fc69e094-e84a-44d5-9a2c-726bac11b1c2-kube-api-access-659s6\") pod \"openstackclient\" (UID: \"fc69e094-e84a-44d5-9a2c-726bac11b1c2\") " pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.330935 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.630903 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6174475a-ddff-44cc-bd53-014017041b14","Type":"ContainerStarted","Data":"da1156b6cfa9cbb5dec372bafc689f98c90b587eca2214879259bb53798484ce"} Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.661024 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerID="0a30eba24af8989557be3f48addb1982eac1ad717d24eb1d39b16ac689a3f9c7" exitCode=0 Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.661101 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" event={"ID":"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb","Type":"ContainerDied","Data":"0a30eba24af8989557be3f48addb1982eac1ad717d24eb1d39b16ac689a3f9c7"} Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.667732 4721 generic.go:334] "Generic (PLEG): container finished" podID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerID="3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d" exitCode=0 Jan 30 21:50:16 crc kubenswrapper[4721]: I0130 21:50:16.669256 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" event={"ID":"37728bd9-18e7-4391-a9ff-87c282bb1c72","Type":"ContainerDied","Data":"3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d"} Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.231954 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.298305 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-nb\") pod \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.323527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-config\") pod \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.323887 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-sb\") pod \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.323958 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2h49\" (UniqueName: \"kubernetes.io/projected/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-kube-api-access-w2h49\") pod \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.324158 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-svc\") pod \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.324208 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-swift-storage-0\") pod \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\" (UID: \"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb\") " Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.361915 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-kube-api-access-w2h49" (OuterVolumeSpecName: "kube-api-access-w2h49") pod "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" (UID: "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb"). InnerVolumeSpecName "kube-api-access-w2h49". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.447041 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" (UID: "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.452249 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.452286 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2h49\" (UniqueName: \"kubernetes.io/projected/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-kube-api-access-w2h49\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.483255 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.557557 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" (UID: "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.584519 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-config" (OuterVolumeSpecName: "config") pod "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" (UID: "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.586241 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" (UID: "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.601927 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" (UID: "8aeedebb-2a13-44dd-b9a2-add4ee0b94bb"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.657174 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.657539 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.657553 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.657561 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.687120 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"fc69e094-e84a-44d5-9a2c-726bac11b1c2","Type":"ContainerStarted","Data":"a3f7a14e4cb9f889438b56055abbb6e88568160e6bf8d66e4dd59a2b62c06507"} Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.691854 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" event={"ID":"8aeedebb-2a13-44dd-b9a2-add4ee0b94bb","Type":"ContainerDied","Data":"415b7988b793b8d7deb3aa97c6959bab027e0fdce13e1e2aa5083cc5794e38c0"} Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.691907 4721 scope.go:117] "RemoveContainer" containerID="0a30eba24af8989557be3f48addb1982eac1ad717d24eb1d39b16ac689a3f9c7" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.692032 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-zcp2h" Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.708072 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" event={"ID":"37728bd9-18e7-4391-a9ff-87c282bb1c72","Type":"ContainerStarted","Data":"f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa"} Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.737081 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-zcp2h"] Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.744937 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-zcp2h"] Jan 30 21:50:17 crc kubenswrapper[4721]: I0130 21:50:17.777547 4721 scope.go:117] "RemoveContainer" containerID="916ec534266fce7926d19495d65c66267422e59196791f93aa56cf68c21a45f2" Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.103863 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" path="/var/lib/kubelet/pods/8aeedebb-2a13-44dd-b9a2-add4ee0b94bb/volumes" Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.721569 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c56f8a6d-9d54-49cd-90e1-2f1003a54794","Type":"ContainerStarted","Data":"cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7"} Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.725817 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6174475a-ddff-44cc-bd53-014017041b14","Type":"ContainerStarted","Data":"596501f8297f5d2e4929bede8dae43db0098c39e282989116b08b39485ea530f"} Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.725874 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.725890 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.725890 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api-log" containerID="cri-o://da1156b6cfa9cbb5dec372bafc689f98c90b587eca2214879259bb53798484ce" gracePeriod=30 Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.725911 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api" containerID="cri-o://596501f8297f5d2e4929bede8dae43db0098c39e282989116b08b39485ea530f" gracePeriod=30 Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.755784 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" podStartSLOduration=5.7557592920000005 podStartE2EDuration="5.755759292s" podCreationTimestamp="2026-01-30 21:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:18.744805812 +0000 UTC m=+2007.536707068" watchObservedRunningTime="2026-01-30 21:50:18.755759292 +0000 UTC m=+2007.547660548" Jan 30 21:50:18 crc kubenswrapper[4721]: I0130 21:50:18.778201 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.778183427 
podStartE2EDuration="5.778183427s" podCreationTimestamp="2026-01-30 21:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:18.768977171 +0000 UTC m=+2007.560878427" watchObservedRunningTime="2026-01-30 21:50:18.778183427 +0000 UTC m=+2007.570084673" Jan 30 21:50:19 crc kubenswrapper[4721]: I0130 21:50:19.738770 4721 generic.go:334] "Generic (PLEG): container finished" podID="6174475a-ddff-44cc-bd53-014017041b14" containerID="596501f8297f5d2e4929bede8dae43db0098c39e282989116b08b39485ea530f" exitCode=0 Jan 30 21:50:19 crc kubenswrapper[4721]: I0130 21:50:19.739122 4721 generic.go:334] "Generic (PLEG): container finished" podID="6174475a-ddff-44cc-bd53-014017041b14" containerID="da1156b6cfa9cbb5dec372bafc689f98c90b587eca2214879259bb53798484ce" exitCode=143 Jan 30 21:50:19 crc kubenswrapper[4721]: I0130 21:50:19.738893 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6174475a-ddff-44cc-bd53-014017041b14","Type":"ContainerDied","Data":"596501f8297f5d2e4929bede8dae43db0098c39e282989116b08b39485ea530f"} Jan 30 21:50:19 crc kubenswrapper[4721]: I0130 21:50:19.739247 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6174475a-ddff-44cc-bd53-014017041b14","Type":"ContainerDied","Data":"da1156b6cfa9cbb5dec372bafc689f98c90b587eca2214879259bb53798484ce"} Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.479885 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.479885 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.797800 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.809985 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c56f8a6d-9d54-49cd-90e1-2f1003a54794","Type":"ContainerStarted","Data":"37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616"} Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.816135 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6174475a-ddff-44cc-bd53-014017041b14","Type":"ContainerDied","Data":"262387a334ef5c022bc787c3ba1b631649d3f50d7f5862c4b82e1ec8955c2375"} Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.816338 4721 scope.go:117] "RemoveContainer" containerID="596501f8297f5d2e4929bede8dae43db0098c39e282989116b08b39485ea530f" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.816532 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.855771 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-combined-ca-bundle\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.856587 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-scripts\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.856722 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6174475a-ddff-44cc-bd53-014017041b14-logs\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.856624 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.465985872 podStartE2EDuration="7.856604569s" podCreationTimestamp="2026-01-30 21:50:13 +0000 UTC" firstStartedPulling="2026-01-30 21:50:14.986444569 +0000 UTC m=+2003.778345805" lastFinishedPulling="2026-01-30 21:50:17.377063266 +0000 UTC m=+2006.168964502" observedRunningTime="2026-01-30 21:50:20.852853012 +0000 UTC m=+2009.644754268" watchObservedRunningTime="2026-01-30 21:50:20.856604569 +0000 UTC m=+2009.648505805" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.857171 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5r7w\" (UniqueName: \"kubernetes.io/projected/6174475a-ddff-44cc-bd53-014017041b14-kube-api-access-l5r7w\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.857280 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.857422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6174475a-ddff-44cc-bd53-014017041b14-etc-machine-id\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.857569 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data-custom\") pod \"6174475a-ddff-44cc-bd53-014017041b14\" (UID: \"6174475a-ddff-44cc-bd53-014017041b14\") " Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.860749 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6174475a-ddff-44cc-bd53-014017041b14-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.861589 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6174475a-ddff-44cc-bd53-014017041b14-logs" (OuterVolumeSpecName: "logs") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.863476 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-scripts" (OuterVolumeSpecName: "scripts") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.864616 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6174475a-ddff-44cc-bd53-014017041b14-kube-api-access-l5r7w" (OuterVolumeSpecName: "kube-api-access-l5r7w") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "kube-api-access-l5r7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.870422 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.892628 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.926518 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data" (OuterVolumeSpecName: "config-data") pod "6174475a-ddff-44cc-bd53-014017041b14" (UID: "6174475a-ddff-44cc-bd53-014017041b14"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.960146 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.960477 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5r7w\" (UniqueName: \"kubernetes.io/projected/6174475a-ddff-44cc-bd53-014017041b14-kube-api-access-l5r7w\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.960575 4721 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6174475a-ddff-44cc-bd53-014017041b14-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.960758 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.960829 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.961054 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6174475a-ddff-44cc-bd53-014017041b14-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:20 crc kubenswrapper[4721]: I0130 21:50:20.961106 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6174475a-ddff-44cc-bd53-014017041b14-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.042865 4721 scope.go:117] "RemoveContainer" containerID="da1156b6cfa9cbb5dec372bafc689f98c90b587eca2214879259bb53798484ce" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.181018 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.200381 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.225388 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:21 crc kubenswrapper[4721]: E0130 21:50:21.225853 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.225874 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api" Jan 30 21:50:21 crc kubenswrapper[4721]: E0130 21:50:21.225888 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api-log" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.225896 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api-log" Jan 30 21:50:21 crc kubenswrapper[4721]: E0130 21:50:21.225915 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerName="dnsmasq-dns" Jan 30 21:50:21 crc kubenswrapper[4721]: 
I0130 21:50:21.225924 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerName="dnsmasq-dns" Jan 30 21:50:21 crc kubenswrapper[4721]: E0130 21:50:21.225936 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerName="init" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.225942 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerName="init" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.226125 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aeedebb-2a13-44dd-b9a2-add4ee0b94bb" containerName="dnsmasq-dns" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.226141 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api-log" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.226153 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6174475a-ddff-44cc-bd53-014017041b14" containerName="cinder-api" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.227268 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.236946 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.242861 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.243451 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.244084 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.281752 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-config-data-custom\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.281838 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.281931 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49h65\" (UniqueName: \"kubernetes.io/projected/2e690f08-a69f-4b8a-9698-f66afbf94f43-kube-api-access-49h65\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.281957 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-config-data\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.281985 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e690f08-a69f-4b8a-9698-f66afbf94f43-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.282007 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e690f08-a69f-4b8a-9698-f66afbf94f43-logs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.282075 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.282106 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.282174 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-scripts\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384500 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-scripts\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384620 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-config-data-custom\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384667 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384699 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49h65\" (UniqueName: \"kubernetes.io/projected/2e690f08-a69f-4b8a-9698-f66afbf94f43-kube-api-access-49h65\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384716 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-config-data\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 
crc kubenswrapper[4721]: I0130 21:50:21.384742 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e690f08-a69f-4b8a-9698-f66afbf94f43-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384759 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e690f08-a69f-4b8a-9698-f66afbf94f43-logs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384812 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.384836 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.385704 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e690f08-a69f-4b8a-9698-f66afbf94f43-logs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.385805 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e690f08-a69f-4b8a-9698-f66afbf94f43-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.390727 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-config-data-custom\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.391364 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.392483 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-config-data\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.392657 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.392997 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-scripts\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.398497 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e690f08-a69f-4b8a-9698-f66afbf94f43-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.410869 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49h65\" (UniqueName: \"kubernetes.io/projected/2e690f08-a69f-4b8a-9698-f66afbf94f43-kube-api-access-49h65\") pod \"cinder-api-0\" (UID: \"2e690f08-a69f-4b8a-9698-f66afbf94f43\") " pod="openstack/cinder-api-0" Jan 30 21:50:21 crc kubenswrapper[4721]: I0130 21:50:21.581035 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 30 21:50:22 crc kubenswrapper[4721]: I0130 21:50:22.045654 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 30 21:50:22 crc kubenswrapper[4721]: W0130 21:50:22.050660 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e690f08_a69f_4b8a_9698_f66afbf94f43.slice/crio-86b7fe845bdb57458069403f27a896ece7a0937fece0c37009d3097cff923c60 WatchSource:0}: Error finding container 86b7fe845bdb57458069403f27a896ece7a0937fece0c37009d3097cff923c60: Status 404 returned error can't find the container with id 86b7fe845bdb57458069403f27a896ece7a0937fece0c37009d3097cff923c60 Jan 30 21:50:22 crc kubenswrapper[4721]: I0130 21:50:22.115487 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6174475a-ddff-44cc-bd53-014017041b14" path="/var/lib/kubelet/pods/6174475a-ddff-44cc-bd53-014017041b14/volumes" Jan 30 21:50:22 crc kubenswrapper[4721]: I0130 21:50:22.852458 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2e690f08-a69f-4b8a-9698-f66afbf94f43","Type":"ContainerStarted","Data":"86b7fe845bdb57458069403f27a896ece7a0937fece0c37009d3097cff923c60"} Jan 30 21:50:23 crc kubenswrapper[4721]: I0130 21:50:23.864871 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2e690f08-a69f-4b8a-9698-f66afbf94f43","Type":"ContainerStarted","Data":"a2ede26d137d2078d54de9aca07dbe15d36bdb057e5afe603664ae83e2328683"} Jan 30 21:50:23 crc kubenswrapper[4721]: I0130 21:50:23.921454 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 30 21:50:23 crc kubenswrapper[4721]: I0130 21:50:23.923737 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.185:8080/\": dial tcp 10.217.0.185:8080: connect: connection refused" Jan 30 21:50:24 crc kubenswrapper[4721]: I0130 21:50:24.201578 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:24 crc kubenswrapper[4721]: I0130 21:50:24.287278 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-785d8bcb8c-5sg8c"] Jan 30 21:50:24 crc kubenswrapper[4721]: I0130 21:50:24.287529 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="dnsmasq-dns" containerID="cri-o://8cee18d15c81ed5577ae43697288f605301f7f1175647c1b407e1e42044441d6" gracePeriod=10 Jan 30 21:50:24 crc kubenswrapper[4721]: I0130 21:50:24.475506 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:24 crc kubenswrapper[4721]: I0130 21:50:24.475580 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:25 crc kubenswrapper[4721]: I0130 21:50:25.358028 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.168:5353: connect: connection refused" Jan 30 21:50:25 crc kubenswrapper[4721]: I0130 21:50:25.563922 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:25 crc kubenswrapper[4721]: I0130 21:50:25.564034 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.048763 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-68b6df574b-x89tq" podUID="e418bacc-47a2-45cd-9bb3-35e42563c482" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.183:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.048787 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-68b6df574b-x89tq" podUID="e418bacc-47a2-45cd-9bb3-35e42563c482" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.183:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.913531 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.913801 4721 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/barbican-api-68b6df574b-x89tq" podUID="e418bacc-47a2-45cd-9bb3-35e42563c482" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.914403 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68b6df574b-x89tq" podUID="e418bacc-47a2-45cd-9bb3-35e42563c482" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.914579 4721 generic.go:334] "Generic (PLEG): container finished" podID="4698d6a6-e501-4f42-b6d0-172334487746" containerID="8cee18d15c81ed5577ae43697288f605301f7f1175647c1b407e1e42044441d6" exitCode=0 Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.914639 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" event={"ID":"4698d6a6-e501-4f42-b6d0-172334487746","Type":"ContainerDied","Data":"8cee18d15c81ed5577ae43697288f605301f7f1175647c1b407e1e42044441d6"} Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.916029 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.917245 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2e690f08-a69f-4b8a-9698-f66afbf94f43","Type":"ContainerStarted","Data":"10d83c8eafead297b90de6ca96a125c4dd27cdcf59c0e3cde34b47d1d7dd216f"} Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.918445 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 30 21:50:26 crc kubenswrapper[4721]: I0130 21:50:26.952153 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.952127348 podStartE2EDuration="5.952127348s" podCreationTimestamp="2026-01-30 21:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:26.941998724 +0000 UTC m=+2015.733899970" watchObservedRunningTime="2026-01-30 21:50:26.952127348 +0000 UTC m=+2015.744028594" Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.360171 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.730999 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.796981 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68b6df574b-x89tq" Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.911021 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-c6f84b47b-7hn7s"] Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.917619 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" containerID="cri-o://b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712" gracePeriod=30 Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.918393 4721 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" containerID="cri-o://38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4" gracePeriod=30 Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.923059 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.185:8080/\": dial tcp 10.217.0.185:8080: connect: connection refused" Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.949276 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": EOF" Jan 30 21:50:28 crc kubenswrapper[4721]: I0130 21:50:28.950152 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": EOF" Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.449013 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.449126 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.449190 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.450081 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c7f0e72b263faf1f8c74cf000b0aea1b54cadc81c92f3cce81eb40376057c48"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.450156 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://6c7f0e72b263faf1f8c74cf000b0aea1b54cadc81c92f3cce81eb40376057c48" gracePeriod=600 Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.971771 4721 generic.go:334] "Generic (PLEG): container finished" podID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerID="b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712" exitCode=143 Jan 30 21:50:29 crc kubenswrapper[4721]: I0130 21:50:29.972074 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6f84b47b-7hn7s" 
event={"ID":"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469","Type":"ContainerDied","Data":"b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712"} Jan 30 21:50:30 crc kubenswrapper[4721]: I0130 21:50:30.987887 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="6c7f0e72b263faf1f8c74cf000b0aea1b54cadc81c92f3cce81eb40376057c48" exitCode=0 Jan 30 21:50:30 crc kubenswrapper[4721]: I0130 21:50:30.988022 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"6c7f0e72b263faf1f8c74cf000b0aea1b54cadc81c92f3cce81eb40376057c48"} Jan 30 21:50:30 crc kubenswrapper[4721]: I0130 21:50:30.988520 4721 scope.go:117] "RemoveContainer" containerID="cfc7ba3b61e040d1c21c3c729c0b2bff09e3bec714107ce2cec22e06c530679e" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.181536 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.181796 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-central-agent" containerID="cri-o://54c498dc7adc7dcf0952499beee857eb48d868a2f5536440590dc0c5ce441c9c" gracePeriod=30 Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.181878 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-notification-agent" containerID="cri-o://05846b20f1a784244e1256c20804886049c326803873082b572b948348aede3c" gracePeriod=30 Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.181911 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="sg-core" containerID="cri-o://9fc1b258d9893762813bf34eff9e539ff558bc5c12e6d75eec578cd3036607c3" gracePeriod=30 Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.181902 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="proxy-httpd" containerID="cri-o://2794980561bb3f8a50f27e22f17663fafa3ec6d2d78d4d797ccb92b725ff23b3" gracePeriod=30 Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.690184 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-746946b9f5-f7fdd"] Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.692289 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.696782 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.697109 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.697254 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.706703 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-746946b9f5-f7fdd"] Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.745430 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-config-data\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.745633 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-combined-ca-bundle\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.745728 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6bkz\" (UniqueName: \"kubernetes.io/projected/46e77d9a-8263-4821-be29-a13929dd4448-kube-api-access-l6bkz\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.745767 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/46e77d9a-8263-4821-be29-a13929dd4448-etc-swift\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.745942 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-public-tls-certs\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.745998 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46e77d9a-8263-4821-be29-a13929dd4448-log-httpd\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.746168 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46e77d9a-8263-4821-be29-a13929dd4448-run-httpd\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " 
pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.746225 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-internal-tls-certs\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851030 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-combined-ca-bundle\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851128 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6bkz\" (UniqueName: \"kubernetes.io/projected/46e77d9a-8263-4821-be29-a13929dd4448-kube-api-access-l6bkz\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/46e77d9a-8263-4821-be29-a13929dd4448-etc-swift\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851266 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-public-tls-certs\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851324 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46e77d9a-8263-4821-be29-a13929dd4448-log-httpd\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851427 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46e77d9a-8263-4821-be29-a13929dd4448-run-httpd\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-internal-tls-certs\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.851570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-config-data\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " 
pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.852579 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46e77d9a-8263-4821-be29-a13929dd4448-run-httpd\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.852926 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46e77d9a-8263-4821-be29-a13929dd4448-log-httpd\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.861405 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-public-tls-certs\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.861448 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-combined-ca-bundle\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.862450 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-config-data\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.864038 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/46e77d9a-8263-4821-be29-a13929dd4448-etc-swift\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.868919 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46e77d9a-8263-4821-be29-a13929dd4448-internal-tls-certs\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:31 crc kubenswrapper[4721]: I0130 21:50:31.871989 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6bkz\" (UniqueName: \"kubernetes.io/projected/46e77d9a-8263-4821-be29-a13929dd4448-kube-api-access-l6bkz\") pod \"swift-proxy-746946b9f5-f7fdd\" (UID: \"46e77d9a-8263-4821-be29-a13929dd4448\") " pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.007552 4721 generic.go:334] "Generic (PLEG): container finished" podID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerID="2794980561bb3f8a50f27e22f17663fafa3ec6d2d78d4d797ccb92b725ff23b3" exitCode=0 Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.008029 4721 generic.go:334] "Generic (PLEG): container finished" podID="ec6217ba-45b0-4472-a464-b389ae7b872f" 
containerID="9fc1b258d9893762813bf34eff9e539ff558bc5c12e6d75eec578cd3036607c3" exitCode=2 Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.008041 4721 generic.go:334] "Generic (PLEG): container finished" podID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerID="54c498dc7adc7dcf0952499beee857eb48d868a2f5536440590dc0c5ce441c9c" exitCode=0 Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.007872 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerDied","Data":"2794980561bb3f8a50f27e22f17663fafa3ec6d2d78d4d797ccb92b725ff23b3"} Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.008079 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerDied","Data":"9fc1b258d9893762813bf34eff9e539ff558bc5c12e6d75eec578cd3036607c3"} Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.008094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerDied","Data":"54c498dc7adc7dcf0952499beee857eb48d868a2f5536440590dc0c5ce441c9c"} Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.017493 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:32 crc kubenswrapper[4721]: E0130 21:50:32.242600 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Jan 30 21:50:32 crc kubenswrapper[4721]: E0130 21:50:32.242795 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n67bh664h59h64dh85hc4h64dh9h67chdfh5d5h556h5b4h677hb5h56hf5h657h547h84h569h57h565h57bh588hbfhd7h5b6h597h95h585h5d7q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_CA_CERT,Value:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-659s6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(fc69e094-e84a-44d5-9a2c-726bac11b1c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 30 21:50:32 crc kubenswrapper[4721]: E0130 21:50:32.243989 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="fc69e094-e84a-44d5-9a2c-726bac11b1c2"
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.376874 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": read tcp 10.217.0.2:41300->10.217.0.182:9311: read: connection reset by peer"
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.377044 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-c6f84b47b-7hn7s" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": read tcp 10.217.0.2:41304->10.217.0.182:9311: read: connection reset by peer"
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.387057 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.471289 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-svc\") pod \"4698d6a6-e501-4f42-b6d0-172334487746\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") "
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.471370 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgzdc\" (UniqueName: \"kubernetes.io/projected/4698d6a6-e501-4f42-b6d0-172334487746-kube-api-access-lgzdc\") pod \"4698d6a6-e501-4f42-b6d0-172334487746\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") "
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.471484 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-sb\") pod \"4698d6a6-e501-4f42-b6d0-172334487746\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") "
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.471583 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-config\") pod \"4698d6a6-e501-4f42-b6d0-172334487746\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") "
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.471633 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-swift-storage-0\") pod \"4698d6a6-e501-4f42-b6d0-172334487746\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") "
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.471682 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-nb\") pod \"4698d6a6-e501-4f42-b6d0-172334487746\" (UID: \"4698d6a6-e501-4f42-b6d0-172334487746\") "
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.514813 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4698d6a6-e501-4f42-b6d0-172334487746-kube-api-access-lgzdc" (OuterVolumeSpecName: "kube-api-access-lgzdc") pod "4698d6a6-e501-4f42-b6d0-172334487746" (UID: "4698d6a6-e501-4f42-b6d0-172334487746"). InnerVolumeSpecName "kube-api-access-lgzdc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.590118 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgzdc\" (UniqueName: \"kubernetes.io/projected/4698d6a6-e501-4f42-b6d0-172334487746-kube-api-access-lgzdc\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.646288 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4698d6a6-e501-4f42-b6d0-172334487746" (UID: "4698d6a6-e501-4f42-b6d0-172334487746"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.656489 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4698d6a6-e501-4f42-b6d0-172334487746" (UID: "4698d6a6-e501-4f42-b6d0-172334487746"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.657578 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-config" (OuterVolumeSpecName: "config") pod "4698d6a6-e501-4f42-b6d0-172334487746" (UID: "4698d6a6-e501-4f42-b6d0-172334487746"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.663118 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4698d6a6-e501-4f42-b6d0-172334487746" (UID: "4698d6a6-e501-4f42-b6d0-172334487746"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.679110 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4698d6a6-e501-4f42-b6d0-172334487746" (UID: "4698d6a6-e501-4f42-b6d0-172334487746"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.693095 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-config\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.694390 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.694506 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.694616 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.694713 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4698d6a6-e501-4f42-b6d0-172334487746-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:32 crc kubenswrapper[4721]: W0130 21:50:32.840684 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46e77d9a_8263_4821_be29_a13929dd4448.slice/crio-869cf861f8e021b3ca03da4b103589d7e7e69c855bcf9422a91d16e7e58a9625 WatchSource:0}: Error finding container 869cf861f8e021b3ca03da4b103589d7e7e69c855bcf9422a91d16e7e58a9625: Status 404 returned error can't find the container with id 869cf861f8e021b3ca03da4b103589d7e7e69c855bcf9422a91d16e7e58a9625
Jan 30 21:50:32 crc kubenswrapper[4721]: I0130 21:50:32.842867 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-746946b9f5-f7fdd"]
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.001615 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c6f84b47b-7hn7s"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.023516 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.025426 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" event={"ID":"4698d6a6-e501-4f42-b6d0-172334487746","Type":"ContainerDied","Data":"0c3442c574ff2c1de7f6a86c6487c4611945d27c20a0255d99d92b713d9bd2f3"}
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.025514 4721 scope.go:117] "RemoveContainer" containerID="8cee18d15c81ed5577ae43697288f605301f7f1175647c1b407e1e42044441d6"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.034999 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-746946b9f5-f7fdd" event={"ID":"46e77d9a-8263-4821-be29-a13929dd4448","Type":"ContainerStarted","Data":"869cf861f8e021b3ca03da4b103589d7e7e69c855bcf9422a91d16e7e58a9625"}
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.043862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"}
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.049078 4721 generic.go:334] "Generic (PLEG): container finished" podID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerID="38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4" exitCode=0
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.049991 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c6f84b47b-7hn7s"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.050167 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6f84b47b-7hn7s" event={"ID":"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469","Type":"ContainerDied","Data":"38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4"}
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.050205 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6f84b47b-7hn7s" event={"ID":"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469","Type":"ContainerDied","Data":"fec2543a82c3557234970a7d37ea09f63c67db297b6a9e56f0a630799b5e6841"}
Jan 30 21:50:33 crc kubenswrapper[4721]: E0130 21:50:33.051205 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="fc69e094-e84a-44d5-9a2c-726bac11b1c2"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.097692 4721 scope.go:117] "RemoveContainer" containerID="c385d6b39276c63483dcf4bc9dedd26e84d677c28450e2197db99399ab655234"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.107382 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk7vs\" (UniqueName: \"kubernetes.io/projected/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-kube-api-access-kk7vs\") pod \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") "
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.107487 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-logs\") pod \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") "
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.107534 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data\") pod \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") "
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.107651 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data-custom\") pod \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") "
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.107800 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-combined-ca-bundle\") pod \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\" (UID: \"67b0ba99-c20f-4d7c-bc18-de6fe4ba8469\") "
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.108425 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-logs" (OuterVolumeSpecName: "logs") pod "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" (UID: "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.108761 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-logs\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.113396 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-kube-api-access-kk7vs" (OuterVolumeSpecName: "kube-api-access-kk7vs") pod "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" (UID: "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469"). InnerVolumeSpecName "kube-api-access-kk7vs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.118508 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" (UID: "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.140999 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-5sg8c"]
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.152480 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" (UID: "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.154306 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-5sg8c"]
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.194590 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data" (OuterVolumeSpecName: "config-data") pod "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" (UID: "67b0ba99-c20f-4d7c-bc18-de6fe4ba8469"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.211098 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.211132 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.211144 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk7vs\" (UniqueName: \"kubernetes.io/projected/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-kube-api-access-kk7vs\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.211154 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.289560 4721 scope.go:117] "RemoveContainer" containerID="38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.312236 4721 scope.go:117] "RemoveContainer" containerID="b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.346371 4721 scope.go:117] "RemoveContainer" containerID="38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4"
Jan 30 21:50:33 crc kubenswrapper[4721]: E0130 21:50:33.346718 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4\": container with ID starting with 38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4 not found: ID does not exist" containerID="38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.346759 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4"} err="failed to get container status \"38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4\": rpc error: code = NotFound desc = could not find container \"38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4\": container with ID starting with 38f698788ba54fe4d2d159705fec8789fbc1cc7f77b851b8c31b7521c17e35d4 not found: ID does not exist"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.346784 4721 scope.go:117] "RemoveContainer" containerID="b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712"
Jan 30 21:50:33 crc kubenswrapper[4721]: E0130 21:50:33.347045 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712\": container with ID starting with b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712 not found: ID does not exist" containerID="b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.347084 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712"} err="failed to get container status \"b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712\": rpc error: code = NotFound desc = could not find container \"b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712\": container with ID starting with b3e15e559b72b3f9bd6814a99fce6e582cb627a6a364ef3c702643568ac02712 not found: ID does not exist"
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.395805 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-c6f84b47b-7hn7s"]
Jan 30 21:50:33 crc kubenswrapper[4721]: I0130 21:50:33.410965 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-c6f84b47b-7hn7s"]
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.059164 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-746946b9f5-f7fdd" event={"ID":"46e77d9a-8263-4821-be29-a13929dd4448","Type":"ContainerStarted","Data":"6170a78658609646bd77162648ac54e824348f857d98560cfc947cde0b2b30f4"}
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.059506 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-746946b9f5-f7fdd" event={"ID":"46e77d9a-8263-4821-be29-a13929dd4448","Type":"ContainerStarted","Data":"09c9acc55fa2b9d8cfb763511ae16875fe46dd18412762fbdb0d7354d29aab64"}
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.059532 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-746946b9f5-f7fdd"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.059713 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-746946b9f5-f7fdd"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.065156 4721 generic.go:334] "Generic (PLEG): container finished" podID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerID="05846b20f1a784244e1256c20804886049c326803873082b572b948348aede3c" exitCode=0
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.065252 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerDied","Data":"05846b20f1a784244e1256c20804886049c326803873082b572b948348aede3c"}
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.065290 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec6217ba-45b0-4472-a464-b389ae7b872f","Type":"ContainerDied","Data":"0f9dd8db472650eb973b106f80a5cac96a80f34088639cf276740b773b992190"}
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.065332 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f9dd8db472650eb973b106f80a5cac96a80f34088639cf276740b773b992190"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.106851 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4698d6a6-e501-4f42-b6d0-172334487746" path="/var/lib/kubelet/pods/4698d6a6-e501-4f42-b6d0-172334487746/volumes"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.107250 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-746946b9f5-f7fdd" podStartSLOduration=3.107231045 podStartE2EDuration="3.107231045s" podCreationTimestamp="2026-01-30 21:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:34.081202628 +0000 UTC m=+2022.873103874" watchObservedRunningTime="2026-01-30 21:50:34.107231045 +0000 UTC m=+2022.899132291"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.107869 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" path="/var/lib/kubelet/pods/67b0ba99-c20f-4d7c-bc18-de6fe4ba8469/volumes"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.133372 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.231575 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5c759f49d6-k22ln"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.232864 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-run-httpd\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.233101 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-combined-ca-bundle\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.233386 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-config-data\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.233517 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-sg-core-conf-yaml\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.234904 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf5tw\" (UniqueName: \"kubernetes.io/projected/ec6217ba-45b0-4472-a464-b389ae7b872f-kube-api-access-gf5tw\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.234997 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-scripts\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.235087 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-log-httpd\") pod \"ec6217ba-45b0-4472-a464-b389ae7b872f\" (UID: \"ec6217ba-45b0-4472-a464-b389ae7b872f\") "
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.233894 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.237224 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.243513 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6217ba-45b0-4472-a464-b389ae7b872f-kube-api-access-gf5tw" (OuterVolumeSpecName: "kube-api-access-gf5tw") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "kube-api-access-gf5tw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.259574 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-scripts" (OuterVolumeSpecName: "scripts") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.288505 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.325071 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.338609 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf5tw\" (UniqueName: \"kubernetes.io/projected/ec6217ba-45b0-4472-a464-b389ae7b872f-kube-api-access-gf5tw\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.338640 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.338650 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.338665 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6217ba-45b0-4472-a464-b389ae7b872f-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.338680 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.348695 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.370818 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.423565 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-config-data" (OuterVolumeSpecName: "config-data") pod "ec6217ba-45b0-4472-a464-b389ae7b872f" (UID: "ec6217ba-45b0-4472-a464-b389ae7b872f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.441187 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.441434 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6217ba-45b0-4472-a464-b389ae7b872f-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:34 crc kubenswrapper[4721]: I0130 21:50:34.967063 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.077640 4721 generic.go:334] "Generic (PLEG): container finished" podID="453b837a-3b2e-4993-90e4-ea72ad1f6b9e" containerID="d7307dcb02312290efaedd3ca8fe92c3e1a45e1d4b8c17f0ba4e2d114a3c5b66" exitCode=0
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.077748 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.077751 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-qmlfm" event={"ID":"453b837a-3b2e-4993-90e4-ea72ad1f6b9e","Type":"ContainerDied","Data":"d7307dcb02312290efaedd3ca8fe92c3e1a45e1d4b8c17f0ba4e2d114a3c5b66"}
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.078418 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="cinder-scheduler" containerID="cri-o://cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7" gracePeriod=30
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.078495 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="probe" containerID="cri-o://37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616" gracePeriod=30
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.141608 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.155670 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.169368 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.169896 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-notification-agent"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.169922 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-notification-agent"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.169942 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="dnsmasq-dns"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.169950 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="dnsmasq-dns"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.169970 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.169978 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.170000 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170009 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.170019 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="proxy-httpd"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170027 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="proxy-httpd"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.170040 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-central-agent"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170047 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-central-agent"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.170062 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="init"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170069 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="init"
Jan 30 21:50:35 crc kubenswrapper[4721]: E0130 21:50:35.170094 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="sg-core"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170103 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="sg-core"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170385 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-notification-agent"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170400 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api-log"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170426 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="ceilometer-central-agent"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170443 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="67b0ba99-c20f-4d7c-bc18-de6fe4ba8469" containerName="barbican-api"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170460 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="sg-core"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170479 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="dnsmasq-dns"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.170489 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" containerName="proxy-httpd"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.172871 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.175740 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.175805 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.181708 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259608 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-scripts\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259651 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-run-httpd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259790 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-config-data\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259825 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-log-httpd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259845 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.259981 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fjkd\" (UniqueName: \"kubernetes.io/projected/fbe0d79a-cba6-4b21-91cb-643d7a49a145-kube-api-access-4fjkd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.270812 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5c759f49d6-k22ln"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.358283 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-5sg8c" podUID="4698d6a6-e501-4f42-b6d0-172334487746" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.168:5353: i/o timeout"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361501 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-scripts\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361552 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-run-httpd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-config-data\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361678 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-log-httpd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361700 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.361856 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fjkd\" (UniqueName: \"kubernetes.io/projected/fbe0d79a-cba6-4b21-91cb-643d7a49a145-kube-api-access-4fjkd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.363376 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-log-httpd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.363437 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-run-httpd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.369453 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.369471 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-config-data\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.389113 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.395784 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-scripts\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.396099 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fjkd\" (UniqueName: \"kubernetes.io/projected/fbe0d79a-cba6-4b21-91cb-643d7a49a145-kube-api-access-4fjkd\") pod \"ceilometer-0\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " pod="openstack/ceilometer-0"
Jan 30 21:50:35 crc kubenswrapper[4721]: I0130 21:50:35.499352 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.087316 4721 generic.go:334] "Generic (PLEG): container finished" podID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerID="37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616" exitCode=0
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.087362 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c56f8a6d-9d54-49cd-90e1-2f1003a54794","Type":"ContainerDied","Data":"37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616"}
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.105601 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec6217ba-45b0-4472-a464-b389ae7b872f" path="/var/lib/kubelet/pods/ec6217ba-45b0-4472-a464-b389ae7b872f/volumes"
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.137921 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:50:36 crc kubenswrapper[4721]: W0130 21:50:36.146630 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbe0d79a_cba6_4b21_91cb_643d7a49a145.slice/crio-c39db39e135d7123eb09f62f9da15fc5be6bba083cfaaf5bb9da585a40620d78 WatchSource:0}: Error finding container c39db39e135d7123eb09f62f9da15fc5be6bba083cfaaf5bb9da585a40620d78: Status 404 returned error can't find the container with id c39db39e135d7123eb09f62f9da15fc5be6bba083cfaaf5bb9da585a40620d78
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.149056 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.454393 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-qmlfm"
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.596309 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-scripts\") pod \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") "
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.596405 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwgzs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-kube-api-access-rwgzs\") pod \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") "
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.596513 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-combined-ca-bundle\") pod \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") "
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.596698 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-certs\") pod \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") "
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.596759 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-config-data\") pod \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\" (UID: \"453b837a-3b2e-4993-90e4-ea72ad1f6b9e\") "
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.607671 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-certs" (OuterVolumeSpecName: "certs") pod "453b837a-3b2e-4993-90e4-ea72ad1f6b9e" (UID: "453b837a-3b2e-4993-90e4-ea72ad1f6b9e"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.607826 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-kube-api-access-rwgzs" (OuterVolumeSpecName: "kube-api-access-rwgzs") pod "453b837a-3b2e-4993-90e4-ea72ad1f6b9e" (UID: "453b837a-3b2e-4993-90e4-ea72ad1f6b9e"). InnerVolumeSpecName "kube-api-access-rwgzs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.615896 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-scripts" (OuterVolumeSpecName: "scripts") pod "453b837a-3b2e-4993-90e4-ea72ad1f6b9e" (UID: "453b837a-3b2e-4993-90e4-ea72ad1f6b9e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.659477 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-config-data" (OuterVolumeSpecName: "config-data") pod "453b837a-3b2e-4993-90e4-ea72ad1f6b9e" (UID: "453b837a-3b2e-4993-90e4-ea72ad1f6b9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.660921 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "453b837a-3b2e-4993-90e4-ea72ad1f6b9e" (UID: "453b837a-3b2e-4993-90e4-ea72ad1f6b9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.699005 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-certs\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.699045 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.699057 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.699066 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwgzs\" (UniqueName: \"kubernetes.io/projected/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-kube-api-access-rwgzs\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.699079 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453b837a-3b2e-4993-90e4-ea72ad1f6b9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:36 crc kubenswrapper[4721]: I0130 21:50:36.980101 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.097944 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerStarted","Data":"c263511d4f473fadfd0e477c25ded510ac7c9a12ccfdec87d711b043e07d4b3c"}
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.098006 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerStarted","Data":"c39db39e135d7123eb09f62f9da15fc5be6bba083cfaaf5bb9da585a40620d78"}
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.100045 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-qmlfm" event={"ID":"453b837a-3b2e-4993-90e4-ea72ad1f6b9e","Type":"ContainerDied","Data":"342e2ec24c7bbdcefe8b1209d6d2fadefa5adfb5649aca51f38361716ba472a8"}
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.100084 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="342e2ec24c7bbdcefe8b1209d6d2fadefa5adfb5649aca51f38361716ba472a8"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.100095 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-qmlfm"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.102861 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c56f8a6d-9d54-49cd-90e1-2f1003a54794","Type":"ContainerDied","Data":"cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7"}
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.102872 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.103049 4721 scope.go:117] "RemoveContainer" containerID="37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.102809 4721 generic.go:334] "Generic (PLEG): container finished" podID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerID="cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7" exitCode=0
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.103192 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c56f8a6d-9d54-49cd-90e1-2f1003a54794","Type":"ContainerDied","Data":"21a4e9b2c932436df40fc25a6cf841ac1756ad1df9e83e804ee52bb3f83f86e6"}
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.106510 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-combined-ca-bundle\") pod \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") "
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.106677 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data\") pod \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") "
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.106721 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9w5c\" (UniqueName: \"kubernetes.io/projected/c56f8a6d-9d54-49cd-90e1-2f1003a54794-kube-api-access-s9w5c\") pod \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") "
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.106793 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data-custom\") pod \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") "
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.107454 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c56f8a6d-9d54-49cd-90e1-2f1003a54794-etc-machine-id\") pod \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") "
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.107575 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c56f8a6d-9d54-49cd-90e1-2f1003a54794-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c56f8a6d-9d54-49cd-90e1-2f1003a54794" (UID: "c56f8a6d-9d54-49cd-90e1-2f1003a54794"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.107678 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-scripts\") pod \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\" (UID: \"c56f8a6d-9d54-49cd-90e1-2f1003a54794\") "
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.108227 4721 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c56f8a6d-9d54-49cd-90e1-2f1003a54794-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.111445 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c56f8a6d-9d54-49cd-90e1-2f1003a54794" (UID: "c56f8a6d-9d54-49cd-90e1-2f1003a54794"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.111915 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-scripts" (OuterVolumeSpecName: "scripts") pod "c56f8a6d-9d54-49cd-90e1-2f1003a54794" (UID: "c56f8a6d-9d54-49cd-90e1-2f1003a54794"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.113492 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c56f8a6d-9d54-49cd-90e1-2f1003a54794-kube-api-access-s9w5c" (OuterVolumeSpecName: "kube-api-access-s9w5c") pod "c56f8a6d-9d54-49cd-90e1-2f1003a54794" (UID: "c56f8a6d-9d54-49cd-90e1-2f1003a54794"). InnerVolumeSpecName "kube-api-access-s9w5c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.135527 4721 scope.go:117] "RemoveContainer" containerID="cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.181096 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c56f8a6d-9d54-49cd-90e1-2f1003a54794" (UID: "c56f8a6d-9d54-49cd-90e1-2f1003a54794"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.210837 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-scripts\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.210974 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.211035 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9w5c\" (UniqueName: \"kubernetes.io/projected/c56f8a6d-9d54-49cd-90e1-2f1003a54794-kube-api-access-s9w5c\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.211093 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.258668 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data" (OuterVolumeSpecName: "config-data") pod "c56f8a6d-9d54-49cd-90e1-2f1003a54794" (UID: "c56f8a6d-9d54-49cd-90e1-2f1003a54794"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.316238 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"]
Jan 30 21:50:37 crc kubenswrapper[4721]: E0130 21:50:37.317079 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="453b837a-3b2e-4993-90e4-ea72ad1f6b9e" containerName="cloudkitty-storageinit"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.317099 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="453b837a-3b2e-4993-90e4-ea72ad1f6b9e" containerName="cloudkitty-storageinit"
Jan 30 21:50:37 crc kubenswrapper[4721]: E0130 21:50:37.317136 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="probe"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.317145 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="probe"
Jan 30 21:50:37 crc kubenswrapper[4721]: E0130 21:50:37.317158 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="cinder-scheduler"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.317166 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="cinder-scheduler"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.317442 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="probe"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.317471 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="453b837a-3b2e-4993-90e4-ea72ad1f6b9e" containerName="cloudkitty-storageinit"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.317489 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" containerName="cinder-scheduler"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.318839 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56f8a6d-9d54-49cd-90e1-2f1003a54794-config-data\") on node \"crc\" DevicePath \"\""
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.325617 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.338910 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-sss92"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.339181 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.339327 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.339989 4721 scope.go:117] "RemoveContainer" containerID="37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616"
Jan 30 21:50:37 crc kubenswrapper[4721]: E0130 21:50:37.342696 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616\": container with ID starting with 37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616 not found: ID does not exist" containerID="37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.342863 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616"} err="failed to get container status \"37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616\": rpc error: code = NotFound desc = could not find container \"37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616\": container with ID starting with 37d699c6f5f0f48387e3d352e71b9196ff91b7d13b84bf07b644973d1e8e1616 not found: ID does not exist"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.344011 4721 scope.go:117] "RemoveContainer" containerID="cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.344519 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.351120 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data"
Jan 30 21:50:37 crc kubenswrapper[4721]: E0130 21:50:37.351608 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7\": container with ID starting with cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7 not found: ID does not exist" containerID="cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7"
Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.351799 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7"} err="failed to get container status \"cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7\": rpc error: code = NotFound desc = could not find container 
\"cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7\": container with ID starting with cba19a3b20a91d40f0397a64515c98d69b9b395626826a68e9123db517622af7 not found: ID does not exist" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.368838 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.382598 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6585f8c7-9cbpt"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.385524 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.408737 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6585f8c7-9cbpt"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.420563 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.420733 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpckj\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-kube-api-access-gpckj\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.420821 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-scripts\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.420915 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.420993 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.421059 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-certs\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.479364 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.494317 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.510245 4721 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.512484 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.523694 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525211 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-config\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxz59\" (UniqueName: \"kubernetes.io/projected/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-kube-api-access-jxz59\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525338 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpckj\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-kube-api-access-gpckj\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525404 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-scripts\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525449 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-svc\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525476 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-swift-storage-0\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525497 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 
21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525522 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-certs\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.525622 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.538674 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-scripts\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.539258 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.541415 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-certs\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.541826 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.542536 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.542887 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.555545 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpckj\" 
(UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-kube-api-access-gpckj\") pod \"cloudkitty-proc-0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.588362 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.591029 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.596772 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.608789 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628108 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-scripts\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628360 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-logs\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-config\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxz59\" (UniqueName: \"kubernetes.io/projected/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-kube-api-access-jxz59\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628647 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-svc\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628725 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-swift-storage-0\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628792 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc 
kubenswrapper[4721]: I0130 21:50:37.628884 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.628962 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppgbp\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-kube-api-access-ppgbp\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.629036 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.629108 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.629174 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-certs\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.629265 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.630074 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-config\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.630890 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-svc\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.631484 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-swift-storage-0\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.632077 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.633975 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.679999 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxz59\" (UniqueName: \"kubernetes.io/projected/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-kube-api-access-jxz59\") pod \"dnsmasq-dns-6c6585f8c7-9cbpt\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.688091 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.716790 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.733830 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-config-data\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.733890 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.733928 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-scripts\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.733949 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-scripts\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.733981 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-logs\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734032 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r8wl\" (UniqueName: \"kubernetes.io/projected/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-kube-api-access-5r8wl\") pod \"cinder-scheduler-0\" (UID: 
\"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734092 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734190 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppgbp\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-kube-api-access-ppgbp\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734224 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734255 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734279 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-certs\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734320 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.734343 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.744874 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-logs\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.745590 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-scripts\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.746363 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.748364 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.750784 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.760219 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-certs\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.774862 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppgbp\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-kube-api-access-ppgbp\") pod \"cloudkitty-api-0\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836044 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-scripts\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836366 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r8wl\" (UniqueName: \"kubernetes.io/projected/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-kube-api-access-5r8wl\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836411 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836515 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836568 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-config-data\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.836997 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.840830 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.841213 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-scripts\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.845184 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.845430 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-config-data\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.846132 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.872849 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r8wl\" (UniqueName: \"kubernetes.io/projected/6b2e94bb-fd95-448e-8ab0-b79d741fd7f5-kube-api-access-5r8wl\") pod \"cinder-scheduler-0\" (UID: \"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5\") " pod="openstack/cinder-scheduler-0" Jan 30 21:50:37 crc kubenswrapper[4721]: I0130 21:50:37.922664 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 30 21:50:38 crc kubenswrapper[4721]: I0130 21:50:38.114766 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c56f8a6d-9d54-49cd-90e1-2f1003a54794" path="/var/lib/kubelet/pods/c56f8a6d-9d54-49cd-90e1-2f1003a54794/volumes" Jan 30 21:50:38 crc kubenswrapper[4721]: I0130 21:50:38.399812 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:38 crc kubenswrapper[4721]: I0130 21:50:38.400478 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6585f8c7-9cbpt"] Jan 30 21:50:38 crc kubenswrapper[4721]: I0130 21:50:38.605167 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:38 crc kubenswrapper[4721]: I0130 21:50:38.739127 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.205454 4721 generic.go:334] "Generic (PLEG): container finished" podID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerID="33dfbbfef66356b75dcf7be8b9e2fa768e3971a4138a930fbcb480a49549e106" exitCode=0 Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.205951 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" event={"ID":"1c3d3653-e283-4f1e-909f-e59ebfcf82f0","Type":"ContainerDied","Data":"33dfbbfef66356b75dcf7be8b9e2fa768e3971a4138a930fbcb480a49549e106"} Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.205981 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" event={"ID":"1c3d3653-e283-4f1e-909f-e59ebfcf82f0","Type":"ContainerStarted","Data":"8e54db8409e2eba864fb7b2b767d6a1cc48e542a38e04167e9deae89b72ae06e"} Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.238898 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerStarted","Data":"0c3980b5ae7082fecb792052a34e9f964a13e32802b91bb4d0d4b6c3e90ebbaa"} Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.257890 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5","Type":"ContainerStarted","Data":"18d1cdbaf958ce2e9cd8acd4fc63539a547b2a5649c1f15b0f2e169b18f643ab"} Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.276227 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0","Type":"ContainerStarted","Data":"e0518f36ecea6f687b08ab1879a4068563f9d292192f2ba5ed6862f8bad4c3fa"} Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.292470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5a3cda76-b9dd-4ef1-9821-251d3245d5b5","Type":"ContainerStarted","Data":"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba"} Jan 30 21:50:39 crc kubenswrapper[4721]: I0130 21:50:39.292631 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5a3cda76-b9dd-4ef1-9821-251d3245d5b5","Type":"ContainerStarted","Data":"d19742dd4e9a989fcc807d309e987b429a3d3ebd8f7b6a6ecf1386ba625464bd"} Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.302919 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerStarted","Data":"ac51df5a91981e4a7632975b0507e043e6153fe2b933d2cae90ce638f6aefe1b"} Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.305358 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5","Type":"ContainerStarted","Data":"3749d6500f3c63f854ab38713a8ae821fba5279fbd94ba4ada284bbb966bdd7b"} Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.307709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5a3cda76-b9dd-4ef1-9821-251d3245d5b5","Type":"ContainerStarted","Data":"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c"} Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.308710 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.312168 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" event={"ID":"1c3d3653-e283-4f1e-909f-e59ebfcf82f0","Type":"ContainerStarted","Data":"d84e6fe0dc81e7bba3f335390a28e571dd22f28c5e6a19cc0f54061b74af833c"} Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.312341 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.332476 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.332451038 podStartE2EDuration="3.332451038s" podCreationTimestamp="2026-01-30 21:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:40.322816739 +0000 UTC m=+2029.114718015" watchObservedRunningTime="2026-01-30 21:50:40.332451038 +0000 UTC m=+2029.124352294" Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.353262 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" podStartSLOduration=3.353239353 podStartE2EDuration="3.353239353s" podCreationTimestamp="2026-01-30 21:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:40.347960359 +0000 UTC m=+2029.139861605" watchObservedRunningTime="2026-01-30 21:50:40.353239353 +0000 UTC m=+2029.145140599" Jan 30 21:50:40 crc kubenswrapper[4721]: I0130 21:50:40.779998 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.035195 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.038671 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-746946b9f5-f7fdd" Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.335004 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6b2e94bb-fd95-448e-8ab0-b79d741fd7f5","Type":"ContainerStarted","Data":"df08c43e528b210f8e3117f9d5b534f84b740f9edea35982c6df634ad8395531"} Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.337290 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" 
event={"ID":"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0","Type":"ContainerStarted","Data":"7c9fbc0cdd931808935b46b6997a70f8558b3d54f63d9d37debd85866fe5f9ae"} Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.342469 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerStarted","Data":"43182ace1f07c07e7c2a1fe5bcd31291ccbe683fe10955db9b0d6ed5818dc5e9"} Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.342501 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.342647 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api-log" containerID="cri-o://379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba" gracePeriod=30 Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.342961 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api" containerID="cri-o://db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c" gracePeriod=30 Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.360558 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.360538888 podStartE2EDuration="5.360538888s" podCreationTimestamp="2026-01-30 21:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:42.352900621 +0000 UTC m=+2031.144801867" watchObservedRunningTime="2026-01-30 21:50:42.360538888 +0000 UTC m=+2031.152440134" Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.380253 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.1120194 podStartE2EDuration="7.380234378s" podCreationTimestamp="2026-01-30 21:50:35 +0000 UTC" firstStartedPulling="2026-01-30 21:50:36.148795433 +0000 UTC m=+2024.940696689" lastFinishedPulling="2026-01-30 21:50:41.417010421 +0000 UTC m=+2030.208911667" observedRunningTime="2026-01-30 21:50:42.377286858 +0000 UTC m=+2031.169188104" watchObservedRunningTime="2026-01-30 21:50:42.380234378 +0000 UTC m=+2031.172135624" Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.410022 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.398782386 podStartE2EDuration="5.410000962s" podCreationTimestamp="2026-01-30 21:50:37 +0000 UTC" firstStartedPulling="2026-01-30 21:50:38.404525125 +0000 UTC m=+2027.196426371" lastFinishedPulling="2026-01-30 21:50:41.415743701 +0000 UTC m=+2030.207644947" observedRunningTime="2026-01-30 21:50:42.395682978 +0000 UTC m=+2031.187584224" watchObservedRunningTime="2026-01-30 21:50:42.410000962 +0000 UTC m=+2031.201902208" Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.424458 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:42 crc kubenswrapper[4721]: I0130 21:50:42.923955 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.025765 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.129902 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-logs\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.130253 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-scripts\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.130286 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data-custom\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.130384 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.130421 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppgbp\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-kube-api-access-ppgbp\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.130451 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-combined-ca-bundle\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.130570 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-certs\") pod \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\" (UID: \"5a3cda76-b9dd-4ef1-9821-251d3245d5b5\") " Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.132285 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-logs" (OuterVolumeSpecName: "logs") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.138817 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.139015 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-kube-api-access-ppgbp" (OuterVolumeSpecName: "kube-api-access-ppgbp") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "kube-api-access-ppgbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.139098 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-scripts" (OuterVolumeSpecName: "scripts") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.143279 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-certs" (OuterVolumeSpecName: "certs") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.166493 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data" (OuterVolumeSpecName: "config-data") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.173473 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a3cda76-b9dd-4ef1-9821-251d3245d5b5" (UID: "5a3cda76-b9dd-4ef1-9821-251d3245d5b5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233416 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233449 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233459 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233469 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233478 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233493 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppgbp\" (UniqueName: \"kubernetes.io/projected/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-kube-api-access-ppgbp\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.233503 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3cda76-b9dd-4ef1-9821-251d3245d5b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.353886 4721 generic.go:334] "Generic (PLEG): container finished" podID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerID="db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c" exitCode=0 Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.353915 4721 generic.go:334] "Generic (PLEG): container finished" podID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerID="379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba" exitCode=143 Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.354704 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.362397 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5a3cda76-b9dd-4ef1-9821-251d3245d5b5","Type":"ContainerDied","Data":"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c"} Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.362476 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5a3cda76-b9dd-4ef1-9821-251d3245d5b5","Type":"ContainerDied","Data":"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba"} Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.362504 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5a3cda76-b9dd-4ef1-9821-251d3245d5b5","Type":"ContainerDied","Data":"d19742dd4e9a989fcc807d309e987b429a3d3ebd8f7b6a6ecf1386ba625464bd"} Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.362525 4721 scope.go:117] "RemoveContainer" containerID="db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.394739 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.406001 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.412613 4721 scope.go:117] "RemoveContainer" containerID="379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.419002 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:43 crc kubenswrapper[4721]: E0130 21:50:43.419642 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.419856 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api" Jan 30 21:50:43 crc kubenswrapper[4721]: E0130 21:50:43.419995 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api-log" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.420094 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api-log" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.420431 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.420564 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" containerName="cloudkitty-api-log" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.422040 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.434936 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.435186 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.435396 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.469064 4721 scope.go:117] "RemoveContainer" containerID="db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.470950 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:43 crc kubenswrapper[4721]: E0130 21:50:43.471755 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c\": container with ID starting with db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c not found: ID does not exist" containerID="db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.471789 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c"} err="failed to get container status \"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c\": rpc error: code = NotFound desc = could not find container \"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c\": container with ID starting with db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c not found: ID does not exist" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.471812 4721 scope.go:117] "RemoveContainer" containerID="379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba" Jan 30 21:50:43 crc kubenswrapper[4721]: E0130 21:50:43.474987 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba\": container with ID starting with 379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba not found: ID does not exist" containerID="379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.475098 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba"} err="failed to get container status \"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba\": rpc error: code = NotFound desc = could not find container \"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba\": container with ID starting with 379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba not found: ID does not exist" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.475200 4721 scope.go:117] "RemoveContainer" containerID="db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.475939 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c"} err="failed to get container status \"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c\": rpc error: code = NotFound desc = could not find container \"db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c\": container with ID starting with db1644548ed0428632ca43832fec5b589a0b9c91ee34d5d702d165a51dfd4b6c not found: ID does not exist" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.475977 4721 scope.go:117] "RemoveContainer" containerID="379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.476326 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba"} err="failed to get container status \"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba\": rpc error: code = NotFound desc = could not find container \"379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba\": container with ID starting with 379d1b7d2452e4af08d28792c3995c9c7ff62932a6ba80211ac5b4e6d87bc1ba not found: ID does not exist" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.538673 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd01d7d-385c-4b0d-bd25-291e63104c09-logs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.538755 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.538787 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.538891 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-scripts\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.540328 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.540388 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.540629 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.540668 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.540713 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xw6r\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-kube-api-access-7xw6r\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644427 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644491 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644612 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644652 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xw6r\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-kube-api-access-7xw6r\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644724 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd01d7d-385c-4b0d-bd25-291e63104c09-logs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644794 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-combined-ca-bundle\") pod 
\"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644832 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.644973 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-scripts\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.647611 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd01d7d-385c-4b0d-bd25-291e63104c09-logs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.648663 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-scripts\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.658458 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.662019 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.662469 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.663236 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.663423 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-certs\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.667111 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: 
\"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.667167 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xw6r\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-kube-api-access-7xw6r\") pod \"cloudkitty-api-0\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " pod="openstack/cloudkitty-api-0" Jan 30 21:50:43 crc kubenswrapper[4721]: I0130 21:50:43.749618 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:50:44 crc kubenswrapper[4721]: I0130 21:50:44.103800 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a3cda76-b9dd-4ef1-9821-251d3245d5b5" path="/var/lib/kubelet/pods/5a3cda76-b9dd-4ef1-9821-251d3245d5b5/volumes" Jan 30 21:50:44 crc kubenswrapper[4721]: I0130 21:50:44.248114 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:50:44 crc kubenswrapper[4721]: W0130 21:50:44.250665 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffd01d7d_385c_4b0d_bd25_291e63104c09.slice/crio-0575286f3f7e2019d65398c110ec8fd83f98bc88b45b186b11d33a2b8960b712 WatchSource:0}: Error finding container 0575286f3f7e2019d65398c110ec8fd83f98bc88b45b186b11d33a2b8960b712: Status 404 returned error can't find the container with id 0575286f3f7e2019d65398c110ec8fd83f98bc88b45b186b11d33a2b8960b712 Jan 30 21:50:44 crc kubenswrapper[4721]: I0130 21:50:44.366364 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"ffd01d7d-385c-4b0d-bd25-291e63104c09","Type":"ContainerStarted","Data":"0575286f3f7e2019d65398c110ec8fd83f98bc88b45b186b11d33a2b8960b712"} Jan 30 21:50:44 crc kubenswrapper[4721]: I0130 21:50:44.368061 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" containerName="cloudkitty-proc" containerID="cri-o://7c9fbc0cdd931808935b46b6997a70f8558b3d54f63d9d37debd85866fe5f9ae" gracePeriod=30 Jan 30 21:50:45 crc kubenswrapper[4721]: I0130 21:50:45.385769 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"ffd01d7d-385c-4b0d-bd25-291e63104c09","Type":"ContainerStarted","Data":"e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36"} Jan 30 21:50:45 crc kubenswrapper[4721]: I0130 21:50:45.387041 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"ffd01d7d-385c-4b0d-bd25-291e63104c09","Type":"ContainerStarted","Data":"a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6"} Jan 30 21:50:45 crc kubenswrapper[4721]: I0130 21:50:45.387329 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Jan 30 21:50:45 crc kubenswrapper[4721]: I0130 21:50:45.410667 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.41064752 podStartE2EDuration="2.41064752s" podCreationTimestamp="2026-01-30 21:50:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:45.406967376 +0000 UTC m=+2034.198868632" watchObservedRunningTime="2026-01-30 21:50:45.41064752 +0000 UTC m=+2034.202548766" Jan 30 
21:50:46 crc kubenswrapper[4721]: I0130 21:50:46.977068 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:46 crc kubenswrapper[4721]: I0130 21:50:46.979236 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="proxy-httpd" containerID="cri-o://43182ace1f07c07e7c2a1fe5bcd31291ccbe683fe10955db9b0d6ed5818dc5e9" gracePeriod=30 Jan 30 21:50:46 crc kubenswrapper[4721]: I0130 21:50:46.979306 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="sg-core" containerID="cri-o://ac51df5a91981e4a7632975b0507e043e6153fe2b933d2cae90ce638f6aefe1b" gracePeriod=30 Jan 30 21:50:46 crc kubenswrapper[4721]: I0130 21:50:46.979335 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-notification-agent" containerID="cri-o://0c3980b5ae7082fecb792052a34e9f964a13e32802b91bb4d0d4b6c3e90ebbaa" gracePeriod=30 Jan 30 21:50:46 crc kubenswrapper[4721]: I0130 21:50:46.979199 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-central-agent" containerID="cri-o://c263511d4f473fadfd0e477c25ded510ac7c9a12ccfdec87d711b043e07d4b3c" gracePeriod=30 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.435864 4721 generic.go:334] "Generic (PLEG): container finished" podID="2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" containerID="7c9fbc0cdd931808935b46b6997a70f8558b3d54f63d9d37debd85866fe5f9ae" exitCode=0 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.436342 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0","Type":"ContainerDied","Data":"7c9fbc0cdd931808935b46b6997a70f8558b3d54f63d9d37debd85866fe5f9ae"} Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494077 4721 generic.go:334] "Generic (PLEG): container finished" podID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerID="43182ace1f07c07e7c2a1fe5bcd31291ccbe683fe10955db9b0d6ed5818dc5e9" exitCode=0 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494360 4721 generic.go:334] "Generic (PLEG): container finished" podID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerID="ac51df5a91981e4a7632975b0507e043e6153fe2b933d2cae90ce638f6aefe1b" exitCode=2 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494470 4721 generic.go:334] "Generic (PLEG): container finished" podID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerID="0c3980b5ae7082fecb792052a34e9f964a13e32802b91bb4d0d4b6c3e90ebbaa" exitCode=0 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494560 4721 generic.go:334] "Generic (PLEG): container finished" podID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerID="c263511d4f473fadfd0e477c25ded510ac7c9a12ccfdec87d711b043e07d4b3c" exitCode=0 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494150 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerDied","Data":"43182ace1f07c07e7c2a1fe5bcd31291ccbe683fe10955db9b0d6ed5818dc5e9"} Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494720 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerDied","Data":"ac51df5a91981e4a7632975b0507e043e6153fe2b933d2cae90ce638f6aefe1b"} Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494739 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerDied","Data":"0c3980b5ae7082fecb792052a34e9f964a13e32802b91bb4d0d4b6c3e90ebbaa"} Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.494748 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerDied","Data":"c263511d4f473fadfd0e477c25ded510ac7c9a12ccfdec87d711b043e07d4b3c"} Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.616232 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.673149 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data\") pod \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.673221 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-scripts\") pod \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.673356 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpckj\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-kube-api-access-gpckj\") pod \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.673445 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-certs\") pod \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.673498 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data-custom\") pod \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.673637 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-combined-ca-bundle\") pod \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\" (UID: \"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.697487 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" (UID: "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.697521 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-scripts" (OuterVolumeSpecName: "scripts") pod "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" (UID: "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.697581 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-kube-api-access-gpckj" (OuterVolumeSpecName: "kube-api-access-gpckj") pod "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" (UID: "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0"). InnerVolumeSpecName "kube-api-access-gpckj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.697620 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-certs" (OuterVolumeSpecName: "certs") pod "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" (UID: "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.718447 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" (UID: "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.720455 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.761937 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data" (OuterVolumeSpecName: "config-data") pod "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" (UID: "2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.780068 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.780099 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.780113 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.780124 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.780132 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.780140 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpckj\" (UniqueName: \"kubernetes.io/projected/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0-kube-api-access-gpckj\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.828413 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-p8n56"] Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.828637 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerName="dnsmasq-dns" containerID="cri-o://f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa" gracePeriod=10 Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.936745 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983105 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-run-httpd\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983194 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fjkd\" (UniqueName: \"kubernetes.io/projected/fbe0d79a-cba6-4b21-91cb-643d7a49a145-kube-api-access-4fjkd\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983236 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-combined-ca-bundle\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983259 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-sg-core-conf-yaml\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983311 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-config-data\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983465 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-scripts\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.983602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-log-httpd\") pod \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\" (UID: \"fbe0d79a-cba6-4b21-91cb-643d7a49a145\") " Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.984981 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.985219 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.988954 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbe0d79a-cba6-4b21-91cb-643d7a49a145-kube-api-access-4fjkd" (OuterVolumeSpecName: "kube-api-access-4fjkd") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "kube-api-access-4fjkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:47 crc kubenswrapper[4721]: I0130 21:50:47.994207 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-scripts" (OuterVolumeSpecName: "scripts") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.038790 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.086057 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.086368 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.086541 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fbe0d79a-cba6-4b21-91cb-643d7a49a145-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.086605 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fjkd\" (UniqueName: \"kubernetes.io/projected/fbe0d79a-cba6-4b21-91cb-643d7a49a145-kube-api-access-4fjkd\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.086663 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.116837 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.188923 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.212549 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-config-data" (OuterVolumeSpecName: "config-data") pod "fbe0d79a-cba6-4b21-91cb-643d7a49a145" (UID: "fbe0d79a-cba6-4b21-91cb-643d7a49a145"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.290670 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbe0d79a-cba6-4b21-91cb-643d7a49a145-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.321587 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.503938 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.508509 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fbe0d79a-cba6-4b21-91cb-643d7a49a145","Type":"ContainerDied","Data":"c39db39e135d7123eb09f62f9da15fc5be6bba083cfaaf5bb9da585a40620d78"} Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.508558 4721 scope.go:117] "RemoveContainer" containerID="43182ace1f07c07e7c2a1fe5bcd31291ccbe683fe10955db9b0d6ed5818dc5e9" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.508578 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.510758 4721 generic.go:334] "Generic (PLEG): container finished" podID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerID="f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa" exitCode=0 Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.510801 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" event={"ID":"37728bd9-18e7-4391-a9ff-87c282bb1c72","Type":"ContainerDied","Data":"f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa"} Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.510820 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" event={"ID":"37728bd9-18e7-4391-a9ff-87c282bb1c72","Type":"ContainerDied","Data":"879f71471751f9fb5a6207e0730c95ee7a4f631d6d2fc21437b9ae5010b5cffc"} Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.510860 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-p8n56" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.512694 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0","Type":"ContainerDied","Data":"e0518f36ecea6f687b08ab1879a4068563f9d292192f2ba5ed6862f8bad4c3fa"} Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.512736 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.559428 4721 scope.go:117] "RemoveContainer" containerID="ac51df5a91981e4a7632975b0507e043e6153fe2b933d2cae90ce638f6aefe1b" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.595358 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.596047 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-config\") pod \"37728bd9-18e7-4391-a9ff-87c282bb1c72\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.596153 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-sb\") pod \"37728bd9-18e7-4391-a9ff-87c282bb1c72\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.596181 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-nb\") pod \"37728bd9-18e7-4391-a9ff-87c282bb1c72\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.596226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nwxm\" (UniqueName: \"kubernetes.io/projected/37728bd9-18e7-4391-a9ff-87c282bb1c72-kube-api-access-7nwxm\") pod \"37728bd9-18e7-4391-a9ff-87c282bb1c72\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.596312 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-svc\") pod \"37728bd9-18e7-4391-a9ff-87c282bb1c72\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.596390 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-swift-storage-0\") pod \"37728bd9-18e7-4391-a9ff-87c282bb1c72\" (UID: \"37728bd9-18e7-4391-a9ff-87c282bb1c72\") " Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.613766 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37728bd9-18e7-4391-a9ff-87c282bb1c72-kube-api-access-7nwxm" (OuterVolumeSpecName: "kube-api-access-7nwxm") pod "37728bd9-18e7-4391-a9ff-87c282bb1c72" (UID: "37728bd9-18e7-4391-a9ff-87c282bb1c72"). InnerVolumeSpecName "kube-api-access-7nwxm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.648737 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.671501 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.671976 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="sg-core" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.671993 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="sg-core" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.672006 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-central-agent" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672012 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-central-agent" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.672022 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" containerName="cloudkitty-proc" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672028 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" containerName="cloudkitty-proc" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.672051 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerName="init" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672058 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerName="init" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.672069 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-notification-agent" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672075 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-notification-agent" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.672085 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="proxy-httpd" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672091 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="proxy-httpd" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.672102 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerName="dnsmasq-dns" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672108 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerName="dnsmasq-dns" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672273 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="ceilometer-notification-agent" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672288 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" 
containerName="ceilometer-central-agent" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672319 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="sg-core" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672330 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" containerName="cloudkitty-proc" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672341 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" containerName="proxy-httpd" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.672356 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" containerName="dnsmasq-dns" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.675865 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.683254 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698505 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698623 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvkgd\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-kube-api-access-pvkgd\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698656 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698880 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698900 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-certs\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698917 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.698964 4721 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nwxm\" (UniqueName: \"kubernetes.io/projected/37728bd9-18e7-4391-a9ff-87c282bb1c72-kube-api-access-7nwxm\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.703230 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.709636 4721 scope.go:117] "RemoveContainer" containerID="0c3980b5ae7082fecb792052a34e9f964a13e32802b91bb4d0d4b6c3e90ebbaa" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.714706 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.715513 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "37728bd9-18e7-4391-a9ff-87c282bb1c72" (UID: "37728bd9-18e7-4391-a9ff-87c282bb1c72"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.728240 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.736446 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.740258 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.742560 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.743960 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.745832 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "37728bd9-18e7-4391-a9ff-87c282bb1c72" (UID: "37728bd9-18e7-4391-a9ff-87c282bb1c72"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.747030 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-config" (OuterVolumeSpecName: "config") pod "37728bd9-18e7-4391-a9ff-87c282bb1c72" (UID: "37728bd9-18e7-4391-a9ff-87c282bb1c72"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.752330 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.759461 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "37728bd9-18e7-4391-a9ff-87c282bb1c72" (UID: "37728bd9-18e7-4391-a9ff-87c282bb1c72"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.768887 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "37728bd9-18e7-4391-a9ff-87c282bb1c72" (UID: "37728bd9-18e7-4391-a9ff-87c282bb1c72"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.782802 4721 scope.go:117] "RemoveContainer" containerID="c263511d4f473fadfd0e477c25ded510ac7c9a12ccfdec87d711b043e07d4b3c" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.800544 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvkgd\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-kube-api-access-pvkgd\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.800614 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801333 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801376 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-certs\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801470 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801573 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801743 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801759 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801771 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801780 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.801788 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37728bd9-18e7-4391-a9ff-87c282bb1c72-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.805458 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.805951 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.806011 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.809258 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.812908 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-certs\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.828468 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvkgd\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-kube-api-access-pvkgd\") pod \"cloudkitty-proc-0\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.834026 4721 scope.go:117] "RemoveContainer" containerID="f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.856925 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-p8n56"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.871733 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-p8n56"] Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.879066 4721 scope.go:117] "RemoveContainer" containerID="3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.903033 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.903344 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-log-httpd\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.903640 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-scripts\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.903669 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-config-data\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.903711 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-run-httpd\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.904127 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.904174 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8fkq\" (UniqueName: \"kubernetes.io/projected/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-kube-api-access-l8fkq\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.923985 4721 scope.go:117] "RemoveContainer" containerID="f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.924513 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa\": container with ID starting with f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa not found: ID does not exist" containerID="f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.924560 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa"} err="failed to get container status \"f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa\": rpc error: code = NotFound desc = 
could not find container \"f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa\": container with ID starting with f5d30011fb2ce515f45b59b8c09555d6aee0cf4fe0449dcd922bb80db20db1aa not found: ID does not exist" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.924620 4721 scope.go:117] "RemoveContainer" containerID="3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.924959 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d\": container with ID starting with 3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d not found: ID does not exist" containerID="3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.924983 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d"} err="failed to get container status \"3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d\": rpc error: code = NotFound desc = could not find container \"3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d\": container with ID starting with 3602ab62444aa515909d907968f83d448e6f2359b9ff1893f45a8a487086c04d not found: ID does not exist" Jan 30 21:50:48 crc kubenswrapper[4721]: I0130 21:50:48.924995 4721 scope.go:117] "RemoveContainer" containerID="7c9fbc0cdd931808935b46b6997a70f8558b3d54f63d9d37debd85866fe5f9ae" Jan 30 21:50:48 crc kubenswrapper[4721]: E0130 21:50:48.945666 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37728bd9_18e7_4391_a9ff_87c282bb1c72.slice/crio-879f71471751f9fb5a6207e0730c95ee7a4f631d6d2fc21437b9ae5010b5cffc\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46c7155a_444a_42b9_9e5d_183998bc5d22.slice/crio-conmon-608d96ad0b85b91fb46b12fb868f1a60f508ed4a4382de60affb327f786c3aa7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46c7155a_444a_42b9_9e5d_183998bc5d22.slice/crio-608d96ad0b85b91fb46b12fb868f1a60f508ed4a4382de60affb327f786c3aa7.scope\": RecentStats: unable to find data in memory cache]" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006175 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-log-httpd\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006291 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-scripts\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006397 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-config-data\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " 
pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006447 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-run-httpd\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006552 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8fkq\" (UniqueName: \"kubernetes.io/projected/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-kube-api-access-l8fkq\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006611 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.006929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-log-httpd\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.007348 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-run-httpd\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.011349 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.012058 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.012086 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-scripts\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.012730 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-config-data\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc 
kubenswrapper[4721]: I0130 21:50:49.026211 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8fkq\" (UniqueName: \"kubernetes.io/projected/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-kube-api-access-l8fkq\") pod \"ceilometer-0\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.064157 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.071273 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.536566 4721 generic.go:334] "Generic (PLEG): container finished" podID="46c7155a-444a-42b9-9e5d-183998bc5d22" containerID="608d96ad0b85b91fb46b12fb868f1a60f508ed4a4382de60affb327f786c3aa7" exitCode=0 Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.536658 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jbqqk" event={"ID":"46c7155a-444a-42b9-9e5d-183998bc5d22","Type":"ContainerDied","Data":"608d96ad0b85b91fb46b12fb868f1a60f508ed4a4382de60affb327f786c3aa7"} Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.540375 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"fc69e094-e84a-44d5-9a2c-726bac11b1c2","Type":"ContainerStarted","Data":"d1bcab01587f7065b4aa35b4919de286f8cdeffa9cd060ed869a072dc0d9a4c7"} Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.564137 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:50:49 crc kubenswrapper[4721]: W0130 21:50:49.568796 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70ce3206_6193_4c77_93bf_9fdb366a9a7d.slice/crio-1cd9ca499a3f23b2fc6cf5154f34d200e9a9ca56e1e8881fa6384b98ad866981 WatchSource:0}: Error finding container 1cd9ca499a3f23b2fc6cf5154f34d200e9a9ca56e1e8881fa6384b98ad866981: Status 404 returned error can't find the container with id 1cd9ca499a3f23b2fc6cf5154f34d200e9a9ca56e1e8881fa6384b98ad866981 Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.576861 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.295855895 podStartE2EDuration="34.576842193s" podCreationTimestamp="2026-01-30 21:50:15 +0000 UTC" firstStartedPulling="2026-01-30 21:50:17.503541108 +0000 UTC m=+2006.295442354" lastFinishedPulling="2026-01-30 21:50:48.784527406 +0000 UTC m=+2037.576428652" observedRunningTime="2026-01-30 21:50:49.56773171 +0000 UTC m=+2038.359632956" watchObservedRunningTime="2026-01-30 21:50:49.576842193 +0000 UTC m=+2038.368743439" Jan 30 21:50:49 crc kubenswrapper[4721]: I0130 21:50:49.692781 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:50 crc kubenswrapper[4721]: I0130 21:50:50.110177 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0" path="/var/lib/kubelet/pods/2bdc3ec2-c37e-4306-b1aa-b14c85f19dd0/volumes" Jan 30 21:50:50 crc kubenswrapper[4721]: I0130 21:50:50.111474 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37728bd9-18e7-4391-a9ff-87c282bb1c72" path="/var/lib/kubelet/pods/37728bd9-18e7-4391-a9ff-87c282bb1c72/volumes" Jan 30 21:50:50 crc kubenswrapper[4721]: I0130 
21:50:50.112816 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbe0d79a-cba6-4b21-91cb-643d7a49a145" path="/var/lib/kubelet/pods/fbe0d79a-cba6-4b21-91cb-643d7a49a145/volumes" Jan 30 21:50:50 crc kubenswrapper[4721]: I0130 21:50:50.560775 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"70ce3206-6193-4c77-93bf-9fdb366a9a7d","Type":"ContainerStarted","Data":"0a8dbbc7fcb51d39b000928f1057573deb327ac4c85033233a13392666c3077d"} Jan 30 21:50:50 crc kubenswrapper[4721]: I0130 21:50:50.560819 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"70ce3206-6193-4c77-93bf-9fdb366a9a7d","Type":"ContainerStarted","Data":"1cd9ca499a3f23b2fc6cf5154f34d200e9a9ca56e1e8881fa6384b98ad866981"} Jan 30 21:50:50 crc kubenswrapper[4721]: I0130 21:50:50.567874 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerStarted","Data":"74635ee03e725704fe049a0c5f8216c8e6d5a7ff540b6d36bed872fcc463b05f"} Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.222132 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-jbqqk" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.249222 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.249205529 podStartE2EDuration="3.249205529s" podCreationTimestamp="2026-01-30 21:50:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:50.581420504 +0000 UTC m=+2039.373321760" watchObservedRunningTime="2026-01-30 21:50:51.249205529 +0000 UTC m=+2040.041106775" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.703107 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-combined-ca-bundle\") pod \"46c7155a-444a-42b9-9e5d-183998bc5d22\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.703268 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfjqc\" (UniqueName: \"kubernetes.io/projected/46c7155a-444a-42b9-9e5d-183998bc5d22-kube-api-access-lfjqc\") pod \"46c7155a-444a-42b9-9e5d-183998bc5d22\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.703376 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-config\") pod \"46c7155a-444a-42b9-9e5d-183998bc5d22\" (UID: \"46c7155a-444a-42b9-9e5d-183998bc5d22\") " Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.720368 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46c7155a-444a-42b9-9e5d-183998bc5d22-kube-api-access-lfjqc" (OuterVolumeSpecName: "kube-api-access-lfjqc") pod "46c7155a-444a-42b9-9e5d-183998bc5d22" (UID: "46c7155a-444a-42b9-9e5d-183998bc5d22"). InnerVolumeSpecName "kube-api-access-lfjqc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.801271 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46c7155a-444a-42b9-9e5d-183998bc5d22" (UID: "46c7155a-444a-42b9-9e5d-183998bc5d22"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.801600 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jbqqk" event={"ID":"46c7155a-444a-42b9-9e5d-183998bc5d22","Type":"ContainerDied","Data":"a10380d925fe1bc95a22c785413253939cd596828938677e3316078eb7de89d0"} Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.801801 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a10380d925fe1bc95a22c785413253939cd596828938677e3316078eb7de89d0" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.802400 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-jbqqk" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.806008 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.806030 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfjqc\" (UniqueName: \"kubernetes.io/projected/46c7155a-444a-42b9-9e5d-183998bc5d22-kube-api-access-lfjqc\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.818291 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-config" (OuterVolumeSpecName: "config") pod "46c7155a-444a-42b9-9e5d-183998bc5d22" (UID: "46c7155a-444a-42b9-9e5d-183998bc5d22"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.885333 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-nb44g"] Jan 30 21:50:51 crc kubenswrapper[4721]: E0130 21:50:51.885834 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c7155a-444a-42b9-9e5d-183998bc5d22" containerName="neutron-db-sync" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.885851 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c7155a-444a-42b9-9e5d-183998bc5d22" containerName="neutron-db-sync" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.887118 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c7155a-444a-42b9-9e5d-183998bc5d22" containerName="neutron-db-sync" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.888449 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.904494 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-nb44g"] Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908177 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908232 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908261 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj7q4\" (UniqueName: \"kubernetes.io/projected/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-kube-api-access-kj7q4\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908339 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-config\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908356 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908374 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-svc\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:51 crc kubenswrapper[4721]: I0130 21:50:51.908657 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/46c7155a-444a-42b9-9e5d-183998bc5d22-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.011265 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.011330 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj7q4\" (UniqueName: 
\"kubernetes.io/projected/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-kube-api-access-kj7q4\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.011394 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.011412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-config\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.011429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-svc\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.011539 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.012133 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.012149 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.012533 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.012672 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-config\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.013968 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-svc\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: 
\"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.056127 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj7q4\" (UniqueName: \"kubernetes.io/projected/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-kube-api-access-kj7q4\") pod \"dnsmasq-dns-67bdc55879-nb44g\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") " pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.080436 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-74b8965846-bhqk4"] Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.088632 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.091702 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.118726 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-74b8965846-bhqk4"] Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.122070 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-config\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.122171 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-ovndb-tls-certs\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.122245 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6hw8\" (UniqueName: \"kubernetes.io/projected/354f3f63-4e99-4cd3-8523-8388ad02ae4d-kube-api-access-c6hw8\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.122293 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-httpd-config\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.122381 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-combined-ca-bundle\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.209203 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.223963 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-combined-ca-bundle\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.225051 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-config\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.225228 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-ovndb-tls-certs\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.225391 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6hw8\" (UniqueName: \"kubernetes.io/projected/354f3f63-4e99-4cd3-8523-8388ad02ae4d-kube-api-access-c6hw8\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.225665 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-httpd-config\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.228008 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.233901 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-combined-ca-bundle\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.236703 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-config\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.239618 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-ovndb-tls-certs\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.242589 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-httpd-config\") pod \"neutron-74b8965846-bhqk4\" (UID: 
\"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.247581 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6hw8\" (UniqueName: \"kubernetes.io/projected/354f3f63-4e99-4cd3-8523-8388ad02ae4d-kube-api-access-c6hw8\") pod \"neutron-74b8965846-bhqk4\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.461809 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.744862 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-nb44g"] Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.814880 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" event={"ID":"ecc0abab-30fa-418a-ac1f-f7f3d288c33d","Type":"ContainerStarted","Data":"556d2b9d583c114eee7b8bfb1c548a5a01958772322a870e483c9955f1ff9308"} Jan 30 21:50:52 crc kubenswrapper[4721]: I0130 21:50:52.822915 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerStarted","Data":"9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a"} Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.154969 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-74b8965846-bhqk4"] Jan 30 21:50:53 crc kubenswrapper[4721]: W0130 21:50:53.171536 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod354f3f63_4e99_4cd3_8523_8388ad02ae4d.slice/crio-467a33d7316cb76c9e45b0680a6f69c33640cdc1b7c8acd5409438d3e29bb5c6 WatchSource:0}: Error finding container 467a33d7316cb76c9e45b0680a6f69c33640cdc1b7c8acd5409438d3e29bb5c6: Status 404 returned error can't find the container with id 467a33d7316cb76c9e45b0680a6f69c33640cdc1b7c8acd5409438d3e29bb5c6 Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.679262 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.860925 4721 generic.go:334] "Generic (PLEG): container finished" podID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerID="02b4d59804b8d84741d4e19ec6c32d0330f1bbe1d63a3b6ded1fcbaa74a60764" exitCode=0 Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.861236 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" event={"ID":"ecc0abab-30fa-418a-ac1f-f7f3d288c33d","Type":"ContainerDied","Data":"02b4d59804b8d84741d4e19ec6c32d0330f1bbe1d63a3b6ded1fcbaa74a60764"} Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.866558 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerStarted","Data":"37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157"} Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.872786 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74b8965846-bhqk4" event={"ID":"354f3f63-4e99-4cd3-8523-8388ad02ae4d","Type":"ContainerStarted","Data":"e46eba6388c79d0d350f5351d58264de8bb7a1c1cd6010cbec3dd3e2d921c3e8"} Jan 30 21:50:53 crc kubenswrapper[4721]: I0130 21:50:53.872836 4721 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/neutron-74b8965846-bhqk4" event={"ID":"354f3f63-4e99-4cd3-8523-8388ad02ae4d","Type":"ContainerStarted","Data":"467a33d7316cb76c9e45b0680a6f69c33640cdc1b7c8acd5409438d3e29bb5c6"} Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.216899 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7cccf5fc8f-zbdml"] Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.219397 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.221447 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.221634 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.238754 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7cccf5fc8f-zbdml"] Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278319 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-config\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278386 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-ovndb-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278413 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-internal-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278442 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m2n7\" (UniqueName: \"kubernetes.io/projected/96081b17-acc8-4700-91da-9a966b7e7f1c-kube-api-access-9m2n7\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278494 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-httpd-config\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278563 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-public-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.278699 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-combined-ca-bundle\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.380992 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-combined-ca-bundle\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.381147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-config\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.381182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-ovndb-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.381202 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-internal-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.381222 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m2n7\" (UniqueName: \"kubernetes.io/projected/96081b17-acc8-4700-91da-9a966b7e7f1c-kube-api-access-9m2n7\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.381244 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-httpd-config\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.381273 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-public-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.390451 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-httpd-config\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.390876 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-ovndb-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.391091 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-config\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.405018 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-internal-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.405653 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-public-tls-certs\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.406510 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96081b17-acc8-4700-91da-9a966b7e7f1c-combined-ca-bundle\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.411441 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m2n7\" (UniqueName: \"kubernetes.io/projected/96081b17-acc8-4700-91da-9a966b7e7f1c-kube-api-access-9m2n7\") pod \"neutron-7cccf5fc8f-zbdml\" (UID: \"96081b17-acc8-4700-91da-9a966b7e7f1c\") " pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.551124 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.886363 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74b8965846-bhqk4" event={"ID":"354f3f63-4e99-4cd3-8523-8388ad02ae4d","Type":"ContainerStarted","Data":"2f5ccb92e316b5240ab5ba30a2eef79694538710d7de659b1ef2b72592184aae"} Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.887617 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.893776 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" event={"ID":"ecc0abab-30fa-418a-ac1f-f7f3d288c33d","Type":"ContainerStarted","Data":"24aae85cbc1753e135b85726e5d8fdb8df020bc9e84aeac9328333bd38c193ba"} Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.894929 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.926601 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-74b8965846-bhqk4" podStartSLOduration=2.926578639 podStartE2EDuration="2.926578639s" podCreationTimestamp="2026-01-30 21:50:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:54.902482162 +0000 UTC m=+2043.694383418" watchObservedRunningTime="2026-01-30 21:50:54.926578639 +0000 UTC m=+2043.718479885" Jan 30 21:50:54 crc kubenswrapper[4721]: I0130 21:50:54.936558 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" podStartSLOduration=3.936535757 podStartE2EDuration="3.936535757s" podCreationTimestamp="2026-01-30 21:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:54.932214784 +0000 UTC m=+2043.724116030" watchObservedRunningTime="2026-01-30 21:50:54.936535757 +0000 UTC m=+2043.728437003" Jan 30 21:50:55 crc kubenswrapper[4721]: I0130 21:50:55.429715 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7cccf5fc8f-zbdml"] Jan 30 21:50:55 crc kubenswrapper[4721]: I0130 21:50:55.904934 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerStarted","Data":"108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0"} Jan 30 21:50:55 crc kubenswrapper[4721]: I0130 21:50:55.906234 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7cccf5fc8f-zbdml" event={"ID":"96081b17-acc8-4700-91da-9a966b7e7f1c","Type":"ContainerStarted","Data":"ce8f25a93c6ad519fdd90927533fdc3e93aaf4c4224f9f61939a5d41703d7a22"} Jan 30 21:50:55 crc kubenswrapper[4721]: I0130 21:50:55.906419 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7cccf5fc8f-zbdml" event={"ID":"96081b17-acc8-4700-91da-9a966b7e7f1c","Type":"ContainerStarted","Data":"ae0fbe4adc1448d239287e4c973eaef88f8150d159c1974849623f90c1c99e3e"} Jan 30 21:50:56 crc kubenswrapper[4721]: I0130 21:50:56.918444 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7cccf5fc8f-zbdml" 
event={"ID":"96081b17-acc8-4700-91da-9a966b7e7f1c","Type":"ContainerStarted","Data":"6b06f02cc967f15c28293f48bc97c8799ef76dc0507b5fdfe692b42a013744f1"} Jan 30 21:50:56 crc kubenswrapper[4721]: I0130 21:50:56.919878 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:50:56 crc kubenswrapper[4721]: I0130 21:50:56.958218 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7cccf5fc8f-zbdml" podStartSLOduration=2.958191158 podStartE2EDuration="2.958191158s" podCreationTimestamp="2026-01-30 21:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:50:56.947873868 +0000 UTC m=+2045.739775114" watchObservedRunningTime="2026-01-30 21:50:56.958191158 +0000 UTC m=+2045.750092414" Jan 30 21:50:57 crc kubenswrapper[4721]: I0130 21:50:57.964130 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerStarted","Data":"5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f"} Jan 30 21:50:57 crc kubenswrapper[4721]: I0130 21:50:57.964657 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-central-agent" containerID="cri-o://9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a" gracePeriod=30 Jan 30 21:50:57 crc kubenswrapper[4721]: I0130 21:50:57.965216 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="proxy-httpd" containerID="cri-o://5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f" gracePeriod=30 Jan 30 21:50:57 crc kubenswrapper[4721]: I0130 21:50:57.965275 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="sg-core" containerID="cri-o://108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0" gracePeriod=30 Jan 30 21:50:57 crc kubenswrapper[4721]: I0130 21:50:57.965334 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-notification-agent" containerID="cri-o://37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157" gracePeriod=30 Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.001200 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.248935172 podStartE2EDuration="10.001177921s" podCreationTimestamp="2026-01-30 21:50:48 +0000 UTC" firstStartedPulling="2026-01-30 21:50:49.710936663 +0000 UTC m=+2038.502837909" lastFinishedPulling="2026-01-30 21:50:57.463179402 +0000 UTC m=+2046.255080658" observedRunningTime="2026-01-30 21:50:57.998156927 +0000 UTC m=+2046.790058173" watchObservedRunningTime="2026-01-30 21:50:58.001177921 +0000 UTC m=+2046.793079167" Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.314029 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.314284 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" 
podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-log" containerID="cri-o://bd78361dbf0c20ca745e3caa7cfbfe205c0893edd4e259a868e5416dc94e1d55" gracePeriod=30 Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.314430 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-httpd" containerID="cri-o://333e38ede45fb6e36c3f7950a1d5ce2ea248cff09092e32045ff9bdbd34cb9e6" gracePeriod=30 Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.982413 4721 generic.go:334] "Generic (PLEG): container finished" podID="1c10a349-defd-4a05-a317-a392fad3219f" containerID="bd78361dbf0c20ca745e3caa7cfbfe205c0893edd4e259a868e5416dc94e1d55" exitCode=143 Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.982791 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1c10a349-defd-4a05-a317-a392fad3219f","Type":"ContainerDied","Data":"bd78361dbf0c20ca745e3caa7cfbfe205c0893edd4e259a868e5416dc94e1d55"} Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.987917 4721 generic.go:334] "Generic (PLEG): container finished" podID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerID="108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0" exitCode=2 Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.987958 4721 generic.go:334] "Generic (PLEG): container finished" podID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerID="37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157" exitCode=0 Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.987979 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerDied","Data":"108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0"} Jan 30 21:50:58 crc kubenswrapper[4721]: I0130 21:50:58.988009 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerDied","Data":"37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157"} Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.789118 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-sdf5s"] Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.791293 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.800859 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-sdf5s"] Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.893835 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-7j762"] Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.898843 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.910549 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnfhp\" (UniqueName: \"kubernetes.io/projected/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-kube-api-access-fnfhp\") pod \"nova-api-db-create-sdf5s\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.910786 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-operator-scripts\") pod \"nova-api-db-create-sdf5s\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.924230 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0bc8-account-create-update-8pnn2"] Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.925666 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.928150 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.946383 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-7j762"] Jan 30 21:50:59 crc kubenswrapper[4721]: I0130 21:50:59.966392 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0bc8-account-create-update-8pnn2"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.012583 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnfhp\" (UniqueName: \"kubernetes.io/projected/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-kube-api-access-fnfhp\") pod \"nova-api-db-create-sdf5s\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.012738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e87c63f-6379-491e-9efe-7255f5ed3ed0-operator-scripts\") pod \"nova-cell0-db-create-7j762\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.012780 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-operator-scripts\") pod \"nova-api-db-create-sdf5s\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.013606 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsmjl\" (UniqueName: \"kubernetes.io/projected/7e87c63f-6379-491e-9efe-7255f5ed3ed0-kube-api-access-vsmjl\") pod \"nova-cell0-db-create-7j762\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.014084 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-operator-scripts\") pod \"nova-api-db-create-sdf5s\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.035297 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnfhp\" (UniqueName: \"kubernetes.io/projected/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-kube-api-access-fnfhp\") pod \"nova-api-db-create-sdf5s\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.110621 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-tk9hs"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.126032 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.127504 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e87c63f-6379-491e-9efe-7255f5ed3ed0-operator-scripts\") pod \"nova-cell0-db-create-7j762\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.127585 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsmjl\" (UniqueName: \"kubernetes.io/projected/7e87c63f-6379-491e-9efe-7255f5ed3ed0-kube-api-access-vsmjl\") pod \"nova-cell0-db-create-7j762\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.127744 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttnb2\" (UniqueName: \"kubernetes.io/projected/3ed707d1-61ef-47a3-b1ae-71e81502a76d-kube-api-access-ttnb2\") pod \"nova-api-0bc8-account-create-update-8pnn2\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.127882 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ed707d1-61ef-47a3-b1ae-71e81502a76d-operator-scripts\") pod \"nova-api-0bc8-account-create-update-8pnn2\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.138187 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e87c63f-6379-491e-9efe-7255f5ed3ed0-operator-scripts\") pod \"nova-cell0-db-create-7j762\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.139862 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.177423 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsmjl\" (UniqueName: \"kubernetes.io/projected/7e87c63f-6379-491e-9efe-7255f5ed3ed0-kube-api-access-vsmjl\") pod \"nova-cell0-db-create-7j762\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.189576 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-tk9hs"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.210912 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-7d3d-account-create-update-ks6zf"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.217787 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.219965 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-7d3d-account-create-update-ks6zf"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.220925 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.228327 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.266722 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttnb2\" (UniqueName: \"kubernetes.io/projected/3ed707d1-61ef-47a3-b1ae-71e81502a76d-kube-api-access-ttnb2\") pod \"nova-api-0bc8-account-create-update-8pnn2\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.266959 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ed707d1-61ef-47a3-b1ae-71e81502a76d-operator-scripts\") pod \"nova-api-0bc8-account-create-update-8pnn2\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.271013 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ed707d1-61ef-47a3-b1ae-71e81502a76d-operator-scripts\") pod \"nova-api-0bc8-account-create-update-8pnn2\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.311581 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttnb2\" (UniqueName: \"kubernetes.io/projected/3ed707d1-61ef-47a3-b1ae-71e81502a76d-kube-api-access-ttnb2\") pod \"nova-api-0bc8-account-create-update-8pnn2\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.314012 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-3caa-account-create-update-hwv8l"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.316196 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.318499 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.345182 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3caa-account-create-update-hwv8l"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.372449 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbf77\" (UniqueName: \"kubernetes.io/projected/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-kube-api-access-vbf77\") pod \"nova-cell1-db-create-tk9hs\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.372817 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a477f1e-4acc-47ef-ad6a-4a385e57a383-operator-scripts\") pod \"nova-cell0-7d3d-account-create-update-ks6zf\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.373039 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-operator-scripts\") pod \"nova-cell1-db-create-tk9hs\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.373143 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kldj\" (UniqueName: \"kubernetes.io/projected/4a477f1e-4acc-47ef-ad6a-4a385e57a383-kube-api-access-7kldj\") pod \"nova-cell0-7d3d-account-create-update-ks6zf\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.475722 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a477f1e-4acc-47ef-ad6a-4a385e57a383-operator-scripts\") pod \"nova-cell0-7d3d-account-create-update-ks6zf\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.476132 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e94b83d-72f0-4926-ba44-97d328e9088e-operator-scripts\") pod \"nova-cell1-3caa-account-create-update-hwv8l\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.476188 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-operator-scripts\") pod \"nova-cell1-db-create-tk9hs\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.476242 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-7kldj\" (UniqueName: \"kubernetes.io/projected/4a477f1e-4acc-47ef-ad6a-4a385e57a383-kube-api-access-7kldj\") pod \"nova-cell0-7d3d-account-create-update-ks6zf\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.476274 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbf77\" (UniqueName: \"kubernetes.io/projected/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-kube-api-access-vbf77\") pod \"nova-cell1-db-create-tk9hs\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.476350 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6qmh\" (UniqueName: \"kubernetes.io/projected/5e94b83d-72f0-4926-ba44-97d328e9088e-kube-api-access-b6qmh\") pod \"nova-cell1-3caa-account-create-update-hwv8l\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.478851 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a477f1e-4acc-47ef-ad6a-4a385e57a383-operator-scripts\") pod \"nova-cell0-7d3d-account-create-update-ks6zf\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.479136 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-operator-scripts\") pod \"nova-cell1-db-create-tk9hs\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.498229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbf77\" (UniqueName: \"kubernetes.io/projected/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-kube-api-access-vbf77\") pod \"nova-cell1-db-create-tk9hs\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.506929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kldj\" (UniqueName: \"kubernetes.io/projected/4a477f1e-4acc-47ef-ad6a-4a385e57a383-kube-api-access-7kldj\") pod \"nova-cell0-7d3d-account-create-update-ks6zf\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.572784 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.580036 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6qmh\" (UniqueName: \"kubernetes.io/projected/5e94b83d-72f0-4926-ba44-97d328e9088e-kube-api-access-b6qmh\") pod \"nova-cell1-3caa-account-create-update-hwv8l\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.580161 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e94b83d-72f0-4926-ba44-97d328e9088e-operator-scripts\") pod \"nova-cell1-3caa-account-create-update-hwv8l\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.580916 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e94b83d-72f0-4926-ba44-97d328e9088e-operator-scripts\") pod \"nova-cell1-3caa-account-create-update-hwv8l\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.595817 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.608966 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6qmh\" (UniqueName: \"kubernetes.io/projected/5e94b83d-72f0-4926-ba44-97d328e9088e-kube-api-access-b6qmh\") pod \"nova-cell1-3caa-account-create-update-hwv8l\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.693979 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.694238 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-log" containerID="cri-o://bd8207409fec4dde97a473bbdfb80877b921a2bf6bd9eb33ea48020e7f72abdb" gracePeriod=30 Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.694603 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-httpd" containerID="cri-o://b07c6536bdc26a5e54b1a16711ad9ac5b7ad43a8d189e4b09714c1def64f2899" gracePeriod=30 Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.714119 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.730094 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:00 crc kubenswrapper[4721]: I0130 21:51:00.873732 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-sdf5s"] Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.047856 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-7j762"] Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.090606 4721 generic.go:334] "Generic (PLEG): container finished" podID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerID="bd8207409fec4dde97a473bbdfb80877b921a2bf6bd9eb33ea48020e7f72abdb" exitCode=143 Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.090650 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"827a4c80-2e4a-4f13-a78c-1583f776cd6c","Type":"ContainerDied","Data":"bd8207409fec4dde97a473bbdfb80877b921a2bf6bd9eb33ea48020e7f72abdb"} Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.451900 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-tk9hs"] Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.673685 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0bc8-account-create-update-8pnn2"] Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.885284 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3caa-account-create-update-hwv8l"] Jan 30 21:51:01 crc kubenswrapper[4721]: I0130 21:51:01.898492 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-7d3d-account-create-update-ks6zf"] Jan 30 21:51:01 crc kubenswrapper[4721]: W0130 21:51:01.906941 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e94b83d_72f0_4926_ba44_97d328e9088e.slice/crio-359a4ef2b4ebb3046a31d972afe240fe14caace7db59b8e41ee5fc2019d98c26 WatchSource:0}: Error finding container 359a4ef2b4ebb3046a31d972afe240fe14caace7db59b8e41ee5fc2019d98c26: Status 404 returned error can't find the container with id 359a4ef2b4ebb3046a31d972afe240fe14caace7db59b8e41ee5fc2019d98c26 Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.104429 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" event={"ID":"3ed707d1-61ef-47a3-b1ae-71e81502a76d","Type":"ContainerStarted","Data":"9bbd14fc93377b1e067f1633428990da2b17bd93dcf686eff06e5c2c4e7df9c2"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.104470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" event={"ID":"4a477f1e-4acc-47ef-ad6a-4a385e57a383","Type":"ContainerStarted","Data":"2871dfa4bcbef03f695cd56a7cbec5c0b1c4c8d5fd6b672dc4ff30042f5ddc52"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.104482 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" event={"ID":"5e94b83d-72f0-4926-ba44-97d328e9088e","Type":"ContainerStarted","Data":"359a4ef2b4ebb3046a31d972afe240fe14caace7db59b8e41ee5fc2019d98c26"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.106381 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-sdf5s" event={"ID":"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4","Type":"ContainerStarted","Data":"e7c8ddee0178ee93ee7168f5f7f172c1b917ef632e72ff56741a2da50499bef6"} Jan 30 21:51:02 crc 
kubenswrapper[4721]: I0130 21:51:02.106423 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-sdf5s" event={"ID":"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4","Type":"ContainerStarted","Data":"5d3a975d92590ccd36221bb5ed641110ff411f86d74fc710c6fac8b65629c5a9"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.108244 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j762" event={"ID":"7e87c63f-6379-491e-9efe-7255f5ed3ed0","Type":"ContainerStarted","Data":"99d5382d8e9e84fb66f5e1b07e327cd096750ffc3c147ba0c8c8cdaea6b96a15"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.108324 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j762" event={"ID":"7e87c63f-6379-491e-9efe-7255f5ed3ed0","Type":"ContainerStarted","Data":"18572fdcd72e84d2003ca8b2464458985e14f6b127c27bd8ebad339f8f90ebb1"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.110670 4721 generic.go:334] "Generic (PLEG): container finished" podID="1c10a349-defd-4a05-a317-a392fad3219f" containerID="333e38ede45fb6e36c3f7950a1d5ce2ea248cff09092e32045ff9bdbd34cb9e6" exitCode=0 Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.110724 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1c10a349-defd-4a05-a317-a392fad3219f","Type":"ContainerDied","Data":"333e38ede45fb6e36c3f7950a1d5ce2ea248cff09092e32045ff9bdbd34cb9e6"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.111930 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tk9hs" event={"ID":"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8","Type":"ContainerStarted","Data":"f891d8e14c88d24347597dcb55ef6d58b0c4b69b8bb11109d04cb8f32a0f55c9"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.111963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tk9hs" event={"ID":"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8","Type":"ContainerStarted","Data":"6f72790ac2a5370c5009c572a9a5cb36d77ae233d266d99d5990e99084e05b7a"} Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.212242 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.324122 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-sdf5s" podStartSLOduration=3.324097115 podStartE2EDuration="3.324097115s" podCreationTimestamp="2026-01-30 21:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:02.279136081 +0000 UTC m=+2051.071037327" watchObservedRunningTime="2026-01-30 21:51:02.324097115 +0000 UTC m=+2051.115998361" Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.381661 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-tk9hs" podStartSLOduration=2.38163865 podStartE2EDuration="2.38163865s" podCreationTimestamp="2026-01-30 21:51:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:02.301650119 +0000 UTC m=+2051.093551365" watchObservedRunningTime="2026-01-30 21:51:02.38163865 +0000 UTC m=+2051.173539896" Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.383995 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-6c6585f8c7-9cbpt"] Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.384250 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="dnsmasq-dns" containerID="cri-o://d84e6fe0dc81e7bba3f335390a28e571dd22f28c5e6a19cc0f54061b74af833c" gracePeriod=10 Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.422715 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-7j762" podStartSLOduration=3.422696763 podStartE2EDuration="3.422696763s" podCreationTimestamp="2026-01-30 21:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:02.326727117 +0000 UTC m=+2051.118628373" watchObservedRunningTime="2026-01-30 21:51:02.422696763 +0000 UTC m=+2051.214598019" Jan 30 21:51:02 crc kubenswrapper[4721]: I0130 21:51:02.718289 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.193:5353: connect: connection refused" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.124856 4721 generic.go:334] "Generic (PLEG): container finished" podID="5e94b83d-72f0-4926-ba44-97d328e9088e" containerID="d05507e9c5a5f53858a3c3f1e65f4b32a69c714ffc8432542b7ea9721ce9a8c9" exitCode=0 Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.125264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" event={"ID":"5e94b83d-72f0-4926-ba44-97d328e9088e","Type":"ContainerDied","Data":"d05507e9c5a5f53858a3c3f1e65f4b32a69c714ffc8432542b7ea9721ce9a8c9"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.138006 4721 generic.go:334] "Generic (PLEG): container finished" podID="ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" containerID="f891d8e14c88d24347597dcb55ef6d58b0c4b69b8bb11109d04cb8f32a0f55c9" exitCode=0 Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.138119 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tk9hs" event={"ID":"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8","Type":"ContainerDied","Data":"f891d8e14c88d24347597dcb55ef6d58b0c4b69b8bb11109d04cb8f32a0f55c9"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.144189 4721 generic.go:334] "Generic (PLEG): container finished" podID="7e87c63f-6379-491e-9efe-7255f5ed3ed0" containerID="99d5382d8e9e84fb66f5e1b07e327cd096750ffc3c147ba0c8c8cdaea6b96a15" exitCode=0 Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.144328 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j762" event={"ID":"7e87c63f-6379-491e-9efe-7255f5ed3ed0","Type":"ContainerDied","Data":"99d5382d8e9e84fb66f5e1b07e327cd096750ffc3c147ba0c8c8cdaea6b96a15"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.171570 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1c10a349-defd-4a05-a317-a392fad3219f","Type":"ContainerDied","Data":"14e1a44adb290c69154c4d5d06e0b9f3a1dbf27a1da6bd4ecc1e4de0e904fdab"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.171616 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14e1a44adb290c69154c4d5d06e0b9f3a1dbf27a1da6bd4ecc1e4de0e904fdab" Jan 30 
21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.176826 4721 generic.go:334] "Generic (PLEG): container finished" podID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerID="d84e6fe0dc81e7bba3f335390a28e571dd22f28c5e6a19cc0f54061b74af833c" exitCode=0 Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.176909 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" event={"ID":"1c3d3653-e283-4f1e-909f-e59ebfcf82f0","Type":"ContainerDied","Data":"d84e6fe0dc81e7bba3f335390a28e571dd22f28c5e6a19cc0f54061b74af833c"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.177166 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" event={"ID":"1c3d3653-e283-4f1e-909f-e59ebfcf82f0","Type":"ContainerDied","Data":"8e54db8409e2eba864fb7b2b767d6a1cc48e542a38e04167e9deae89b72ae06e"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.177256 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e54db8409e2eba864fb7b2b767d6a1cc48e542a38e04167e9deae89b72ae06e" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.179212 4721 generic.go:334] "Generic (PLEG): container finished" podID="3ed707d1-61ef-47a3-b1ae-71e81502a76d" containerID="e642bdec5fe63d6c708cb4bbee967ed89c4c41bde2426c1c9f21709b26519297" exitCode=0 Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.179271 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" event={"ID":"3ed707d1-61ef-47a3-b1ae-71e81502a76d","Type":"ContainerDied","Data":"e642bdec5fe63d6c708cb4bbee967ed89c4c41bde2426c1c9f21709b26519297"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.180668 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" event={"ID":"4a477f1e-4acc-47ef-ad6a-4a385e57a383","Type":"ContainerStarted","Data":"3069cea93ec24c48af1f2dde19d05a825cae0a87d92690aba9bf74208081f5d7"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.184215 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-sdf5s" event={"ID":"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4","Type":"ContainerDied","Data":"e7c8ddee0178ee93ee7168f5f7f172c1b917ef632e72ff56741a2da50499bef6"} Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.184840 4721 generic.go:334] "Generic (PLEG): container finished" podID="ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" containerID="e7c8ddee0178ee93ee7168f5f7f172c1b917ef632e72ff56741a2da50499bef6" exitCode=0 Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.259236 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.265492 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300407 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-svc\") pod \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300485 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-config-data\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300545 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-nb\") pod \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300612 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-public-tls-certs\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300640 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-scripts\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300691 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-config\") pod \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300765 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf8sf\" (UniqueName: \"kubernetes.io/projected/1c10a349-defd-4a05-a317-a392fad3219f-kube-api-access-lf8sf\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300814 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-combined-ca-bundle\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300850 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-logs\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300875 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-swift-storage-0\") pod \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\" (UID: 
\"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300900 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-sb\") pod \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.300967 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxz59\" (UniqueName: \"kubernetes.io/projected/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-kube-api-access-jxz59\") pod \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\" (UID: \"1c3d3653-e283-4f1e-909f-e59ebfcf82f0\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.301025 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-httpd-run\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.301197 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"1c10a349-defd-4a05-a317-a392fad3219f\" (UID: \"1c10a349-defd-4a05-a317-a392fad3219f\") " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.306679 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.308444 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-logs" (OuterVolumeSpecName: "logs") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.314380 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-kube-api-access-jxz59" (OuterVolumeSpecName: "kube-api-access-jxz59") pod "1c3d3653-e283-4f1e-909f-e59ebfcf82f0" (UID: "1c3d3653-e283-4f1e-909f-e59ebfcf82f0"). InnerVolumeSpecName "kube-api-access-jxz59". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.342980 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c10a349-defd-4a05-a317-a392fad3219f-kube-api-access-lf8sf" (OuterVolumeSpecName: "kube-api-access-lf8sf") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "kube-api-access-lf8sf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.368835 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799" (OuterVolumeSpecName: "glance") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "pvc-041364f8-81b5-40ba-86c2-556e83a73799". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.376192 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-scripts" (OuterVolumeSpecName: "scripts") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.403827 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.403857 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf8sf\" (UniqueName: \"kubernetes.io/projected/1c10a349-defd-4a05-a317-a392fad3219f-kube-api-access-lf8sf\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.403868 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.403899 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxz59\" (UniqueName: \"kubernetes.io/projected/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-kube-api-access-jxz59\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.403911 4721 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1c10a349-defd-4a05-a317-a392fad3219f-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.403934 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") on node \"crc\" " Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.467841 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.508602 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.567535 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1c3d3653-e283-4f1e-909f-e59ebfcf82f0" (UID: "1c3d3653-e283-4f1e-909f-e59ebfcf82f0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.577535 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-config" (OuterVolumeSpecName: "config") pod "1c3d3653-e283-4f1e-909f-e59ebfcf82f0" (UID: "1c3d3653-e283-4f1e-909f-e59ebfcf82f0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.600930 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1c3d3653-e283-4f1e-909f-e59ebfcf82f0" (UID: "1c3d3653-e283-4f1e-909f-e59ebfcf82f0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.612729 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.612764 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.612776 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.639356 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1c3d3653-e283-4f1e-909f-e59ebfcf82f0" (UID: "1c3d3653-e283-4f1e-909f-e59ebfcf82f0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.647935 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1c3d3653-e283-4f1e-909f-e59ebfcf82f0" (UID: "1c3d3653-e283-4f1e-909f-e59ebfcf82f0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.648489 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.650512 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-config-data" (OuterVolumeSpecName: "config-data") pod "1c10a349-defd-4a05-a317-a392fad3219f" (UID: "1c10a349-defd-4a05-a317-a392fad3219f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.716779 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.716819 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.716832 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c3d3653-e283-4f1e-909f-e59ebfcf82f0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.716844 4721 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c10a349-defd-4a05-a317-a392fad3219f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.770495 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.770821 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-041364f8-81b5-40ba-86c2-556e83a73799" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799") on node "crc" Jan 30 21:51:03 crc kubenswrapper[4721]: I0130 21:51:03.819865 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.195471 4721 generic.go:334] "Generic (PLEG): container finished" podID="4a477f1e-4acc-47ef-ad6a-4a385e57a383" containerID="3069cea93ec24c48af1f2dde19d05a825cae0a87d92690aba9bf74208081f5d7" exitCode=0 Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.195531 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" event={"ID":"4a477f1e-4acc-47ef-ad6a-4a385e57a383","Type":"ContainerDied","Data":"3069cea93ec24c48af1f2dde19d05a825cae0a87d92690aba9bf74208081f5d7"} Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.198269 4721 generic.go:334] "Generic (PLEG): container finished" podID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerID="b07c6536bdc26a5e54b1a16711ad9ac5b7ad43a8d189e4b09714c1def64f2899" exitCode=0 Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.198652 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"827a4c80-2e4a-4f13-a78c-1583f776cd6c","Type":"ContainerDied","Data":"b07c6536bdc26a5e54b1a16711ad9ac5b7ad43a8d189e4b09714c1def64f2899"} Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.198904 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.199839 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6585f8c7-9cbpt" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.313844 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6585f8c7-9cbpt"] Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.352520 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6585f8c7-9cbpt"] Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.396072 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.407876 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421141 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:51:04 crc kubenswrapper[4721]: E0130 21:51:04.421644 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-log" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421656 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-log" Jan 30 21:51:04 crc kubenswrapper[4721]: E0130 21:51:04.421666 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-httpd" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421673 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-httpd" Jan 30 21:51:04 crc kubenswrapper[4721]: E0130 21:51:04.421703 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="dnsmasq-dns" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421711 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="dnsmasq-dns" Jan 30 21:51:04 crc kubenswrapper[4721]: E0130 21:51:04.421718 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="init" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421724 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="init" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421897 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-httpd" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421911 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c10a349-defd-4a05-a317-a392fad3219f" containerName="glance-log" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.421930 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" containerName="dnsmasq-dns" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.423150 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.437724 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.437992 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.443274 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.541915 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542483 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acb7c332-79bc-432b-b046-248772221388-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542514 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb7c332-79bc-432b-b046-248772221388-logs\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542579 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-config-data\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542624 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5psn4\" (UniqueName: \"kubernetes.io/projected/acb7c332-79bc-432b-b046-248772221388-kube-api-access-5psn4\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542652 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542735 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.542802 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-scripts\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5psn4\" (UniqueName: \"kubernetes.io/projected/acb7c332-79bc-432b-b046-248772221388-kube-api-access-5psn4\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649642 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649693 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649753 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-scripts\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649851 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649957 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acb7c332-79bc-432b-b046-248772221388-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.649984 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb7c332-79bc-432b-b046-248772221388-logs\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.650023 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-config-data\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.653042 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acb7c332-79bc-432b-b046-248772221388-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.653331 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb7c332-79bc-432b-b046-248772221388-logs\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.655776 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.655820 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7f3e3924f4a6ec4e0a2834994592735d1017c71a0fc9cab6b021afe197356bdd/globalmount\"" pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.657165 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.657968 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.658870 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-scripts\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.667288 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb7c332-79bc-432b-b046-248772221388-config-data\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.683737 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5psn4\" (UniqueName: \"kubernetes.io/projected/acb7c332-79bc-432b-b046-248772221388-kube-api-access-5psn4\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.725174 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-041364f8-81b5-40ba-86c2-556e83a73799\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-041364f8-81b5-40ba-86c2-556e83a73799\") pod \"glance-default-external-api-0\" (UID: \"acb7c332-79bc-432b-b046-248772221388\") " pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.768826 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.783940 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853022 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-scripts\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853139 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmwrg\" (UniqueName: \"kubernetes.io/projected/827a4c80-2e4a-4f13-a78c-1583f776cd6c-kube-api-access-tmwrg\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853179 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-logs\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853288 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-combined-ca-bundle\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853355 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-httpd-run\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853550 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853674 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-internal-tls-certs\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.853699 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-config-data\") pod \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\" (UID: \"827a4c80-2e4a-4f13-a78c-1583f776cd6c\") " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.854326 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-logs" (OuterVolumeSpecName: "logs") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.855235 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.900956 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/827a4c80-2e4a-4f13-a78c-1583f776cd6c-kube-api-access-tmwrg" (OuterVolumeSpecName: "kube-api-access-tmwrg") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "kube-api-access-tmwrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.901112 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-scripts" (OuterVolumeSpecName: "scripts") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.934903 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.935938 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db" (OuterVolumeSpecName: "glance") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "pvc-84c5d821-7abb-4e56-9189-5550dfb556db". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957106 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-config-data" (OuterVolumeSpecName: "config-data") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957804 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957852 4721 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957895 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") on node \"crc\" " Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957913 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957929 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957942 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmwrg\" (UniqueName: \"kubernetes.io/projected/827a4c80-2e4a-4f13-a78c-1583f776cd6c-kube-api-access-tmwrg\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.957963 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/827a4c80-2e4a-4f13-a78c-1583f776cd6c-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.987415 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "827a4c80-2e4a-4f13-a78c-1583f776cd6c" (UID: "827a4c80-2e4a-4f13-a78c-1583f776cd6c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.991329 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 30 21:51:04 crc kubenswrapper[4721]: I0130 21:51:04.991512 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-84c5d821-7abb-4e56-9189-5550dfb556db" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db") on node "crc" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.001085 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.059734 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a477f1e-4acc-47ef-ad6a-4a385e57a383-operator-scripts\") pod \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.060408 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kldj\" (UniqueName: \"kubernetes.io/projected/4a477f1e-4acc-47ef-ad6a-4a385e57a383-kube-api-access-7kldj\") pod \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\" (UID: \"4a477f1e-4acc-47ef-ad6a-4a385e57a383\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.060647 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.060884 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a477f1e-4acc-47ef-ad6a-4a385e57a383-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a477f1e-4acc-47ef-ad6a-4a385e57a383" (UID: "4a477f1e-4acc-47ef-ad6a-4a385e57a383"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.061605 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.061624 4721 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/827a4c80-2e4a-4f13-a78c-1583f776cd6c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.061634 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a477f1e-4acc-47ef-ad6a-4a385e57a383-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.066994 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a477f1e-4acc-47ef-ad6a-4a385e57a383-kube-api-access-7kldj" (OuterVolumeSpecName: "kube-api-access-7kldj") pod "4a477f1e-4acc-47ef-ad6a-4a385e57a383" (UID: "4a477f1e-4acc-47ef-ad6a-4a385e57a383"). InnerVolumeSpecName "kube-api-access-7kldj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.069167 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.162556 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-operator-scripts\") pod \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.162614 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-operator-scripts\") pod \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.162681 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnfhp\" (UniqueName: \"kubernetes.io/projected/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-kube-api-access-fnfhp\") pod \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\" (UID: \"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.162708 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbf77\" (UniqueName: \"kubernetes.io/projected/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-kube-api-access-vbf77\") pod \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\" (UID: \"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.163237 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kldj\" (UniqueName: \"kubernetes.io/projected/4a477f1e-4acc-47ef-ad6a-4a385e57a383-kube-api-access-7kldj\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.164640 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" (UID: "ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.164991 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" (UID: "ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.169667 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-kube-api-access-vbf77" (OuterVolumeSpecName: "kube-api-access-vbf77") pod "ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" (UID: "ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8"). InnerVolumeSpecName "kube-api-access-vbf77". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.169862 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-kube-api-access-fnfhp" (OuterVolumeSpecName: "kube-api-access-fnfhp") pod "ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" (UID: "ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4"). 
InnerVolumeSpecName "kube-api-access-fnfhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.212634 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tk9hs" event={"ID":"ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8","Type":"ContainerDied","Data":"6f72790ac2a5370c5009c572a9a5cb36d77ae233d266d99d5990e99084e05b7a"} Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.212685 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f72790ac2a5370c5009c572a9a5cb36d77ae233d266d99d5990e99084e05b7a" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.212771 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tk9hs" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.221552 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.221651 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7d3d-account-create-update-ks6zf" event={"ID":"4a477f1e-4acc-47ef-ad6a-4a385e57a383","Type":"ContainerDied","Data":"2871dfa4bcbef03f695cd56a7cbec5c0b1c4c8d5fd6b672dc4ff30042f5ddc52"} Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.221702 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2871dfa4bcbef03f695cd56a7cbec5c0b1c4c8d5fd6b672dc4ff30042f5ddc52" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.260498 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"827a4c80-2e4a-4f13-a78c-1583f776cd6c","Type":"ContainerDied","Data":"de13d489e8b641710d8e8adb557b41227fbc29555ccae4a351740363c002cc53"} Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.260574 4721 scope.go:117] "RemoveContainer" containerID="b07c6536bdc26a5e54b1a16711ad9ac5b7ad43a8d189e4b09714c1def64f2899" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.260782 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.266061 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.266093 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.266104 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnfhp\" (UniqueName: \"kubernetes.io/projected/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4-kube-api-access-fnfhp\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.266115 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbf77\" (UniqueName: \"kubernetes.io/projected/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8-kube-api-access-vbf77\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.274186 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-sdf5s" event={"ID":"ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4","Type":"ContainerDied","Data":"5d3a975d92590ccd36221bb5ed641110ff411f86d74fc710c6fac8b65629c5a9"} Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.274234 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d3a975d92590ccd36221bb5ed641110ff411f86d74fc710c6fac8b65629c5a9" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.274331 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-sdf5s" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.372904 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.376519 4721 scope.go:117] "RemoveContainer" containerID="bd8207409fec4dde97a473bbdfb80877b921a2bf6bd9eb33ea48020e7f72abdb" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.390358 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.444534 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:51:05 crc kubenswrapper[4721]: E0130 21:51:05.445137 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" containerName="mariadb-database-create" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445154 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" containerName="mariadb-database-create" Jan 30 21:51:05 crc kubenswrapper[4721]: E0130 21:51:05.445163 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a477f1e-4acc-47ef-ad6a-4a385e57a383" containerName="mariadb-account-create-update" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445174 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a477f1e-4acc-47ef-ad6a-4a385e57a383" containerName="mariadb-account-create-update" Jan 30 21:51:05 crc kubenswrapper[4721]: E0130 21:51:05.445186 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-log" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445195 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-log" Jan 30 21:51:05 crc kubenswrapper[4721]: E0130 21:51:05.445243 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-httpd" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445252 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-httpd" Jan 30 21:51:05 crc kubenswrapper[4721]: E0130 21:51:05.445266 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" containerName="mariadb-database-create" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445273 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" containerName="mariadb-database-create" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445588 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-httpd" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445624 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a477f1e-4acc-47ef-ad6a-4a385e57a383" containerName="mariadb-account-create-update" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445637 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" containerName="mariadb-database-create" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445652 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" containerName="glance-log" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.445664 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" containerName="mariadb-database-create" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.447096 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.458667 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.458974 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.461845 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481257 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481403 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481451 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481490 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481516 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481564 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481609 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2crh\" (UniqueName: \"kubernetes.io/projected/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-kube-api-access-g2crh\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.481670 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-logs\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583746 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583813 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583861 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583896 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583919 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583958 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.583999 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2crh\" (UniqueName: \"kubernetes.io/projected/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-kube-api-access-g2crh\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: 
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.584846 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-logs\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.585125 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.591277 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.591328 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a2a66a61f2645de3c0e9d3f208d5113b5666c55647cddcb1d91cf50b3d6010ba/globalmount\"" pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.599635 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.603432 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.605083 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2crh\" (UniqueName: \"kubernetes.io/projected/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-kube-api-access-g2crh\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.604937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.617281 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0"
\"scripts\" (UniqueName: \"kubernetes.io/secret/8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.658322 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.670952 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-84c5d821-7abb-4e56-9189-5550dfb556db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-84c5d821-7abb-4e56-9189-5550dfb556db\") pod \"glance-default-internal-api-0\" (UID: \"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9\") " pod="openstack/glance-default-internal-api-0" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.681970 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.702678 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.786646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttnb2\" (UniqueName: \"kubernetes.io/projected/3ed707d1-61ef-47a3-b1ae-71e81502a76d-kube-api-access-ttnb2\") pod \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.786692 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsmjl\" (UniqueName: \"kubernetes.io/projected/7e87c63f-6379-491e-9efe-7255f5ed3ed0-kube-api-access-vsmjl\") pod \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.786746 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ed707d1-61ef-47a3-b1ae-71e81502a76d-operator-scripts\") pod \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\" (UID: \"3ed707d1-61ef-47a3-b1ae-71e81502a76d\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.786774 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e87c63f-6379-491e-9efe-7255f5ed3ed0-operator-scripts\") pod \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\" (UID: \"7e87c63f-6379-491e-9efe-7255f5ed3ed0\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.786862 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6qmh\" (UniqueName: \"kubernetes.io/projected/5e94b83d-72f0-4926-ba44-97d328e9088e-kube-api-access-b6qmh\") pod \"5e94b83d-72f0-4926-ba44-97d328e9088e\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.786953 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e94b83d-72f0-4926-ba44-97d328e9088e-operator-scripts\") pod \"5e94b83d-72f0-4926-ba44-97d328e9088e\" (UID: \"5e94b83d-72f0-4926-ba44-97d328e9088e\") " Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.789194 4721 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/configmap/3ed707d1-61ef-47a3-b1ae-71e81502a76d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ed707d1-61ef-47a3-b1ae-71e81502a76d" (UID: "3ed707d1-61ef-47a3-b1ae-71e81502a76d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.791362 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e87c63f-6379-491e-9efe-7255f5ed3ed0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e87c63f-6379-491e-9efe-7255f5ed3ed0" (UID: "7e87c63f-6379-491e-9efe-7255f5ed3ed0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.792800 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e94b83d-72f0-4926-ba44-97d328e9088e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5e94b83d-72f0-4926-ba44-97d328e9088e" (UID: "5e94b83d-72f0-4926-ba44-97d328e9088e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.793558 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e94b83d-72f0-4926-ba44-97d328e9088e-kube-api-access-b6qmh" (OuterVolumeSpecName: "kube-api-access-b6qmh") pod "5e94b83d-72f0-4926-ba44-97d328e9088e" (UID: "5e94b83d-72f0-4926-ba44-97d328e9088e"). InnerVolumeSpecName "kube-api-access-b6qmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.795713 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ed707d1-61ef-47a3-b1ae-71e81502a76d-kube-api-access-ttnb2" (OuterVolumeSpecName: "kube-api-access-ttnb2") pod "3ed707d1-61ef-47a3-b1ae-71e81502a76d" (UID: "3ed707d1-61ef-47a3-b1ae-71e81502a76d"). InnerVolumeSpecName "kube-api-access-ttnb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.796035 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e87c63f-6379-491e-9efe-7255f5ed3ed0-kube-api-access-vsmjl" (OuterVolumeSpecName: "kube-api-access-vsmjl") pod "7e87c63f-6379-491e-9efe-7255f5ed3ed0" (UID: "7e87c63f-6379-491e-9efe-7255f5ed3ed0"). InnerVolumeSpecName "kube-api-access-vsmjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.890732 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttnb2\" (UniqueName: \"kubernetes.io/projected/3ed707d1-61ef-47a3-b1ae-71e81502a76d-kube-api-access-ttnb2\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.890772 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsmjl\" (UniqueName: \"kubernetes.io/projected/7e87c63f-6379-491e-9efe-7255f5ed3ed0-kube-api-access-vsmjl\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.890782 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ed707d1-61ef-47a3-b1ae-71e81502a76d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.890796 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e87c63f-6379-491e-9efe-7255f5ed3ed0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.890810 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6qmh\" (UniqueName: \"kubernetes.io/projected/5e94b83d-72f0-4926-ba44-97d328e9088e-kube-api-access-b6qmh\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.890818 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e94b83d-72f0-4926-ba44-97d328e9088e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:05 crc kubenswrapper[4721]: I0130 21:51:05.965088 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.119559 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c10a349-defd-4a05-a317-a392fad3219f" path="/var/lib/kubelet/pods/1c10a349-defd-4a05-a317-a392fad3219f/volumes" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.120666 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c3d3653-e283-4f1e-909f-e59ebfcf82f0" path="/var/lib/kubelet/pods/1c3d3653-e283-4f1e-909f-e59ebfcf82f0/volumes" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.121314 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="827a4c80-2e4a-4f13-a78c-1583f776cd6c" path="/var/lib/kubelet/pods/827a4c80-2e4a-4f13-a78c-1583f776cd6c/volumes" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.215123 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.301156 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7j762" event={"ID":"7e87c63f-6379-491e-9efe-7255f5ed3ed0","Type":"ContainerDied","Data":"18572fdcd72e84d2003ca8b2464458985e14f6b127c27bd8ebad339f8f90ebb1"} Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.301204 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18572fdcd72e84d2003ca8b2464458985e14f6b127c27bd8ebad339f8f90ebb1" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.301289 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-7j762" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.316804 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" event={"ID":"3ed707d1-61ef-47a3-b1ae-71e81502a76d","Type":"ContainerDied","Data":"9bbd14fc93377b1e067f1633428990da2b17bd93dcf686eff06e5c2c4e7df9c2"} Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.316852 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bbd14fc93377b1e067f1633428990da2b17bd93dcf686eff06e5c2c4e7df9c2" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.317062 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0bc8-account-create-update-8pnn2" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.349840 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" event={"ID":"5e94b83d-72f0-4926-ba44-97d328e9088e","Type":"ContainerDied","Data":"359a4ef2b4ebb3046a31d972afe240fe14caace7db59b8e41ee5fc2019d98c26"} Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.349902 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="359a4ef2b4ebb3046a31d972afe240fe14caace7db59b8e41ee5fc2019d98c26" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.349992 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3caa-account-create-update-hwv8l" Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.353881 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acb7c332-79bc-432b-b046-248772221388","Type":"ContainerStarted","Data":"dac2c503272fa3bbae13bafde6b2c4c17e69f0e03979d9887f761b87877dff82"} Jan 30 21:51:06 crc kubenswrapper[4721]: I0130 21:51:06.632330 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 30 21:51:07 crc kubenswrapper[4721]: I0130 21:51:07.383776 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9","Type":"ContainerStarted","Data":"24eb6750418d770824c595e7262f6cb854c587f3cfefa6406ad5fda66e90d7f3"} Jan 30 21:51:07 crc kubenswrapper[4721]: I0130 21:51:07.387851 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acb7c332-79bc-432b-b046-248772221388","Type":"ContainerStarted","Data":"e2160c0d3bacf231dfc09249aeb96532efed9f01cc1f4e3636d13cfef9b554f6"} Jan 30 21:51:07 crc kubenswrapper[4721]: I0130 21:51:07.406541 4721 generic.go:334] "Generic (PLEG): container finished" podID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerID="9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a" exitCode=0 Jan 30 21:51:07 crc kubenswrapper[4721]: I0130 21:51:07.406583 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerDied","Data":"9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a"} Jan 30 21:51:08 crc kubenswrapper[4721]: I0130 21:51:08.419719 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9","Type":"ContainerStarted","Data":"b66cf48e04a2d09bd228a4029abab2ac1deb8b65294098bd0716a326efd08187"} Jan 30 
Jan 30 21:51:08 crc kubenswrapper[4721]: I0130 21:51:08.422239 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acb7c332-79bc-432b-b046-248772221388","Type":"ContainerStarted","Data":"9a40a697698859e8ff1a34d4ee6e16be58e12ed518f10a79caae63c8c6248b36"}
Jan 30 21:51:08 crc kubenswrapper[4721]: I0130 21:51:08.443039 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.44301665 podStartE2EDuration="3.44301665s" podCreationTimestamp="2026-01-30 21:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:08.440002917 +0000 UTC m=+2057.231904163" watchObservedRunningTime="2026-01-30 21:51:08.44301665 +0000 UTC m=+2057.234917896"
Jan 30 21:51:08 crc kubenswrapper[4721]: I0130 21:51:08.475988 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.4759685319999996 podStartE2EDuration="4.475968532s" podCreationTimestamp="2026-01-30 21:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:08.470227334 +0000 UTC m=+2057.262128580" watchObservedRunningTime="2026-01-30 21:51:08.475968532 +0000 UTC m=+2057.267869778"
Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.673056 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f2sbl"]
Jan 30 21:51:10 crc kubenswrapper[4721]: E0130 21:51:10.675975 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e94b83d-72f0-4926-ba44-97d328e9088e" containerName="mariadb-account-create-update"
Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.675997 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e94b83d-72f0-4926-ba44-97d328e9088e" containerName="mariadb-account-create-update"
Jan 30 21:51:10 crc kubenswrapper[4721]: E0130 21:51:10.676052 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ed707d1-61ef-47a3-b1ae-71e81502a76d" containerName="mariadb-account-create-update"
Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.676062 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ed707d1-61ef-47a3-b1ae-71e81502a76d" containerName="mariadb-account-create-update"
Jan 30 21:51:10 crc kubenswrapper[4721]: E0130 21:51:10.676081 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e87c63f-6379-491e-9efe-7255f5ed3ed0" containerName="mariadb-database-create"
Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.676088 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e87c63f-6379-491e-9efe-7255f5ed3ed0" containerName="mariadb-database-create"
Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.676441 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e94b83d-72f0-4926-ba44-97d328e9088e" containerName="mariadb-account-create-update"
Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.676470 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ed707d1-61ef-47a3-b1ae-71e81502a76d" containerName="mariadb-account-create-update"
podUID="3ed707d1-61ef-47a3-b1ae-71e81502a76d" containerName="mariadb-account-create-update" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.676488 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e87c63f-6379-491e-9efe-7255f5ed3ed0" containerName="mariadb-database-create" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.679144 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.684806 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.685055 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.685162 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ppbhh" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.696697 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f2sbl"] Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.811271 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.811531 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-config-data\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.811918 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-scripts\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.812135 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2znj\" (UniqueName: \"kubernetes.io/projected/33591053-d381-4cec-bd2c-a9ddc8c4778c-kube-api-access-f2znj\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.914044 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-scripts\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.915238 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2znj\" (UniqueName: \"kubernetes.io/projected/33591053-d381-4cec-bd2c-a9ddc8c4778c-kube-api-access-f2znj\") pod \"nova-cell0-conductor-db-sync-f2sbl\" 
(UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.915346 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.915507 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-config-data\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.920548 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-scripts\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.923067 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-config-data\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.924030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:10 crc kubenswrapper[4721]: I0130 21:51:10.932721 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2znj\" (UniqueName: \"kubernetes.io/projected/33591053-d381-4cec-bd2c-a9ddc8c4778c-kube-api-access-f2znj\") pod \"nova-cell0-conductor-db-sync-f2sbl\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:11 crc kubenswrapper[4721]: I0130 21:51:11.021082 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:11 crc kubenswrapper[4721]: I0130 21:51:11.593091 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f2sbl"] Jan 30 21:51:12 crc kubenswrapper[4721]: I0130 21:51:12.468559 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" event={"ID":"33591053-d381-4cec-bd2c-a9ddc8c4778c","Type":"ContainerStarted","Data":"66b0e9df39d05035bf6f2cb54aecbd1a2ba6afad21d70bdceeb3fad774959efa"} Jan 30 21:51:14 crc kubenswrapper[4721]: I0130 21:51:14.769201 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 21:51:14 crc kubenswrapper[4721]: I0130 21:51:14.769586 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 30 21:51:14 crc kubenswrapper[4721]: I0130 21:51:14.806843 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 21:51:14 crc kubenswrapper[4721]: I0130 21:51:14.814822 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 30 21:51:15 crc kubenswrapper[4721]: I0130 21:51:15.505691 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 21:51:15 crc kubenswrapper[4721]: I0130 21:51:15.506026 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 30 21:51:15 crc kubenswrapper[4721]: I0130 21:51:15.966029 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:15 crc kubenswrapper[4721]: I0130 21:51:15.966086 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:16 crc kubenswrapper[4721]: I0130 21:51:16.025734 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:16 crc kubenswrapper[4721]: I0130 21:51:16.033665 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:16 crc kubenswrapper[4721]: I0130 21:51:16.519693 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:16 crc kubenswrapper[4721]: I0130 21:51:16.519764 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:17 crc kubenswrapper[4721]: I0130 21:51:17.837787 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 21:51:17 crc kubenswrapper[4721]: I0130 21:51:17.838214 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 21:51:18 crc kubenswrapper[4721]: I0130 21:51:18.163674 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 30 21:51:18 crc kubenswrapper[4721]: I0130 21:51:18.846580 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:18 crc kubenswrapper[4721]: I0130 21:51:18.847612 4721 prober_manager.go:312] "Failed to trigger a 
manual run" probe="Readiness" Jan 30 21:51:19 crc kubenswrapper[4721]: I0130 21:51:19.005026 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 30 21:51:19 crc kubenswrapper[4721]: I0130 21:51:19.071776 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 21:51:19 crc kubenswrapper[4721]: I0130 21:51:19.079214 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jan 30 21:51:21 crc kubenswrapper[4721]: I0130 21:51:21.569373 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" event={"ID":"33591053-d381-4cec-bd2c-a9ddc8c4778c","Type":"ContainerStarted","Data":"b5dbbe3f5bdbfd64d50612e2745d3b9fc3dc1928748705cee17ba01198bc8c18"} Jan 30 21:51:21 crc kubenswrapper[4721]: I0130 21:51:21.592608 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" podStartSLOduration=2.9465299160000002 podStartE2EDuration="11.592589742s" podCreationTimestamp="2026-01-30 21:51:10 +0000 UTC" firstStartedPulling="2026-01-30 21:51:11.610962628 +0000 UTC m=+2060.402863874" lastFinishedPulling="2026-01-30 21:51:20.257022454 +0000 UTC m=+2069.048923700" observedRunningTime="2026-01-30 21:51:21.588498615 +0000 UTC m=+2070.380399861" watchObservedRunningTime="2026-01-30 21:51:21.592589742 +0000 UTC m=+2070.384490988" Jan 30 21:51:22 crc kubenswrapper[4721]: I0130 21:51:22.164004 4721 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod46c7155a-444a-42b9-9e5d-183998bc5d22"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod46c7155a-444a-42b9-9e5d-183998bc5d22] : Timed out while waiting for systemd to remove kubepods-besteffort-pod46c7155a_444a_42b9_9e5d_183998bc5d22.slice" Jan 30 21:51:22 crc kubenswrapper[4721]: I0130 21:51:22.475074 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:51:22 crc kubenswrapper[4721]: I0130 21:51:22.549933 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Jan 30 21:51:24 crc kubenswrapper[4721]: I0130 21:51:24.580769 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7cccf5fc8f-zbdml" Jan 30 21:51:24 crc kubenswrapper[4721]: I0130 21:51:24.650040 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-74b8965846-bhqk4"] Jan 30 21:51:24 crc kubenswrapper[4721]: I0130 21:51:24.650287 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-74b8965846-bhqk4" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-api" containerID="cri-o://e46eba6388c79d0d350f5351d58264de8bb7a1c1cd6010cbec3dd3e2d921c3e8" gracePeriod=30 Jan 30 21:51:24 crc kubenswrapper[4721]: I0130 21:51:24.650716 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-74b8965846-bhqk4" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-httpd" containerID="cri-o://2f5ccb92e316b5240ab5ba30a2eef79694538710d7de659b1ef2b72592184aae" gracePeriod=30 Jan 30 21:51:25 crc kubenswrapper[4721]: I0130 21:51:25.638802 4721 generic.go:334] 
"Generic (PLEG): container finished" podID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerID="2f5ccb92e316b5240ab5ba30a2eef79694538710d7de659b1ef2b72592184aae" exitCode=0 Jan 30 21:51:25 crc kubenswrapper[4721]: I0130 21:51:25.638866 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74b8965846-bhqk4" event={"ID":"354f3f63-4e99-4cd3-8523-8388ad02ae4d","Type":"ContainerDied","Data":"2f5ccb92e316b5240ab5ba30a2eef79694538710d7de659b1ef2b72592184aae"} Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.478172 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.625012 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-log-httpd\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.625684 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8fkq\" (UniqueName: \"kubernetes.io/projected/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-kube-api-access-l8fkq\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.625806 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-scripts\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.625941 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-run-httpd\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.626065 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-config-data\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.626226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-sg-core-conf-yaml\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.626487 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-combined-ca-bundle\") pod \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\" (UID: \"8689f2ff-ba73-48c3-bc8c-c77aabb722e4\") " Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.627734 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.628001 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.640275 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-scripts" (OuterVolumeSpecName: "scripts") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.640589 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-kube-api-access-l8fkq" (OuterVolumeSpecName: "kube-api-access-l8fkq") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "kube-api-access-l8fkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.699463 4721 generic.go:334] "Generic (PLEG): container finished" podID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerID="5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f" exitCode=137 Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.699520 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerDied","Data":"5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f"} Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.699553 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8689f2ff-ba73-48c3-bc8c-c77aabb722e4","Type":"ContainerDied","Data":"74635ee03e725704fe049a0c5f8216c8e6d5a7ff540b6d36bed872fcc463b05f"} Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.699577 4721 scope.go:117] "RemoveContainer" containerID="5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.699846 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.700743 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.729192 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.729227 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8fkq\" (UniqueName: \"kubernetes.io/projected/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-kube-api-access-l8fkq\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.729237 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.729247 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.729255 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.734615 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.784044 4721 scope.go:117] "RemoveContainer" containerID="108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.788475 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-config-data" (OuterVolumeSpecName: "config-data") pod "8689f2ff-ba73-48c3-bc8c-c77aabb722e4" (UID: "8689f2ff-ba73-48c3-bc8c-c77aabb722e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.809507 4721 scope.go:117] "RemoveContainer" containerID="37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.831381 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.831412 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8689f2ff-ba73-48c3-bc8c-c77aabb722e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.832584 4721 scope.go:117] "RemoveContainer" containerID="9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.858144 4721 scope.go:117] "RemoveContainer" containerID="5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f" Jan 30 21:51:28 crc kubenswrapper[4721]: E0130 21:51:28.858714 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f\": container with ID starting with 5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f not found: ID does not exist" containerID="5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.858764 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f"} err="failed to get container status \"5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f\": rpc error: code = NotFound desc = could not find container \"5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f\": container with ID starting with 5251940f0eb216bcd9f723a81a8a3d9949d8ba309e69aa87f86832fc9fffa16f not found: ID does not exist" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.858790 4721 scope.go:117] "RemoveContainer" containerID="108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0" Jan 30 21:51:28 crc kubenswrapper[4721]: E0130 21:51:28.859249 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0\": container with ID starting with 108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0 not found: ID does not exist" containerID="108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.859272 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0"} err="failed to get container status \"108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0\": rpc error: code = NotFound desc = could not find container \"108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0\": container with ID starting with 108488a6cafed30b9652e367e404b020631a399d193b2dad62887826116178c0 not found: ID does not exist" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.859286 4721 scope.go:117] "RemoveContainer" 
containerID="37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157" Jan 30 21:51:28 crc kubenswrapper[4721]: E0130 21:51:28.859648 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157\": container with ID starting with 37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157 not found: ID does not exist" containerID="37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.859824 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157"} err="failed to get container status \"37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157\": rpc error: code = NotFound desc = could not find container \"37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157\": container with ID starting with 37269dd08455c2b3373ea8c1cf56de8ddf65b07edd2eec73ea93d776b4d10157 not found: ID does not exist" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.859954 4721 scope.go:117] "RemoveContainer" containerID="9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a" Jan 30 21:51:28 crc kubenswrapper[4721]: E0130 21:51:28.860423 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a\": container with ID starting with 9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a not found: ID does not exist" containerID="9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a" Jan 30 21:51:28 crc kubenswrapper[4721]: I0130 21:51:28.860525 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a"} err="failed to get container status \"9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a\": rpc error: code = NotFound desc = could not find container \"9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a\": container with ID starting with 9450b8f09509dbf95bdbcf0da4c0409cd6210439e5e6b08eedcfc5cc54e3cc1a not found: ID does not exist" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.050434 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.074227 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.103376 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:29 crc kubenswrapper[4721]: E0130 21:51:29.103928 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-central-agent" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.103947 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-central-agent" Jan 30 21:51:29 crc kubenswrapper[4721]: E0130 21:51:29.103968 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-notification-agent" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.103976 4721 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-notification-agent" Jan 30 21:51:29 crc kubenswrapper[4721]: E0130 21:51:29.103991 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="sg-core" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.103999 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="sg-core" Jan 30 21:51:29 crc kubenswrapper[4721]: E0130 21:51:29.104026 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="proxy-httpd" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.104034 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="proxy-httpd" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.104290 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="proxy-httpd" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.104335 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="sg-core" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.104351 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-notification-agent" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.104368 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" containerName="ceilometer-central-agent" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.106579 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.110756 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.110970 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.112669 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.151793 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-scripts\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.151988 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.152051 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-run-httpd\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.152107 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.152142 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-config-data\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.152263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-log-httpd\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.152388 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsqf9\" (UniqueName: \"kubernetes.io/projected/c8efeacc-1a01-4baa-8de8-b553d09a180b-kube-api-access-vsqf9\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.253577 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 
21:51:29.253653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-run-httpd\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.253688 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.253714 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-config-data\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.253781 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-log-httpd\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.253823 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsqf9\" (UniqueName: \"kubernetes.io/projected/c8efeacc-1a01-4baa-8de8-b553d09a180b-kube-api-access-vsqf9\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.253882 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-scripts\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.254689 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-run-httpd\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.254689 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-log-httpd\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.258576 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.258931 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-scripts\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.259484 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-config-data\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.260096 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.276122 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsqf9\" (UniqueName: \"kubernetes.io/projected/c8efeacc-1a01-4baa-8de8-b553d09a180b-kube-api-access-vsqf9\") pod \"ceilometer-0\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.455615 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.727655 4721 generic.go:334] "Generic (PLEG): container finished" podID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerID="e46eba6388c79d0d350f5351d58264de8bb7a1c1cd6010cbec3dd3e2d921c3e8" exitCode=0 Jan 30 21:51:29 crc kubenswrapper[4721]: I0130 21:51:29.727798 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74b8965846-bhqk4" event={"ID":"354f3f63-4e99-4cd3-8523-8388ad02ae4d","Type":"ContainerDied","Data":"e46eba6388c79d0d350f5351d58264de8bb7a1c1cd6010cbec3dd3e2d921c3e8"} Jan 30 21:51:30 crc kubenswrapper[4721]: W0130 21:51:30.004552 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8efeacc_1a01_4baa_8de8_b553d09a180b.slice/crio-01e9c9126f2ef809c9fb32510f551afccde097b88ff4d2430cc4a4755e619821 WatchSource:0}: Error finding container 01e9c9126f2ef809c9fb32510f551afccde097b88ff4d2430cc4a4755e619821: Status 404 returned error can't find the container with id 01e9c9126f2ef809c9fb32510f551afccde097b88ff4d2430cc4a4755e619821 Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.036894 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.144592 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8689f2ff-ba73-48c3-bc8c-c77aabb722e4" path="/var/lib/kubelet/pods/8689f2ff-ba73-48c3-bc8c-c77aabb722e4/volumes" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.205049 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.379093 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-httpd-config\") pod \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.379215 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-config\") pod \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.379411 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-combined-ca-bundle\") pod \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.379463 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6hw8\" (UniqueName: \"kubernetes.io/projected/354f3f63-4e99-4cd3-8523-8388ad02ae4d-kube-api-access-c6hw8\") pod \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.379505 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-ovndb-tls-certs\") pod \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\" (UID: \"354f3f63-4e99-4cd3-8523-8388ad02ae4d\") " Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.385966 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "354f3f63-4e99-4cd3-8523-8388ad02ae4d" (UID: "354f3f63-4e99-4cd3-8523-8388ad02ae4d"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.399490 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/354f3f63-4e99-4cd3-8523-8388ad02ae4d-kube-api-access-c6hw8" (OuterVolumeSpecName: "kube-api-access-c6hw8") pod "354f3f63-4e99-4cd3-8523-8388ad02ae4d" (UID: "354f3f63-4e99-4cd3-8523-8388ad02ae4d"). InnerVolumeSpecName "kube-api-access-c6hw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.435539 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-config" (OuterVolumeSpecName: "config") pod "354f3f63-4e99-4cd3-8523-8388ad02ae4d" (UID: "354f3f63-4e99-4cd3-8523-8388ad02ae4d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.436922 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "354f3f63-4e99-4cd3-8523-8388ad02ae4d" (UID: "354f3f63-4e99-4cd3-8523-8388ad02ae4d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.482914 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.482954 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6hw8\" (UniqueName: \"kubernetes.io/projected/354f3f63-4e99-4cd3-8523-8388ad02ae4d-kube-api-access-c6hw8\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.482966 4721 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.482976 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.494226 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "354f3f63-4e99-4cd3-8523-8388ad02ae4d" (UID: "354f3f63-4e99-4cd3-8523-8388ad02ae4d"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.586814 4721 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/354f3f63-4e99-4cd3-8523-8388ad02ae4d-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.742590 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74b8965846-bhqk4" event={"ID":"354f3f63-4e99-4cd3-8523-8388ad02ae4d","Type":"ContainerDied","Data":"467a33d7316cb76c9e45b0680a6f69c33640cdc1b7c8acd5409438d3e29bb5c6"} Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.742649 4721 scope.go:117] "RemoveContainer" containerID="2f5ccb92e316b5240ab5ba30a2eef79694538710d7de659b1ef2b72592184aae" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.742652 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-74b8965846-bhqk4" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.747154 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerStarted","Data":"55650b2c783dfcd3d45861052492f5ce0abb756cbd9ddf5adc75572b1645e026"} Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.747185 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerStarted","Data":"01e9c9126f2ef809c9fb32510f551afccde097b88ff4d2430cc4a4755e619821"} Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.787411 4721 scope.go:117] "RemoveContainer" containerID="e46eba6388c79d0d350f5351d58264de8bb7a1c1cd6010cbec3dd3e2d921c3e8" Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.791868 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-74b8965846-bhqk4"] Jan 30 21:51:30 crc kubenswrapper[4721]: I0130 21:51:30.803878 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-74b8965846-bhqk4"] Jan 30 21:51:31 crc kubenswrapper[4721]: I0130 21:51:31.762860 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerStarted","Data":"9ad247e4da1a0562b8d2ce6ca55b2d22244af75953635e1219ed85c70d6c6f1e"} Jan 30 21:51:32 crc kubenswrapper[4721]: I0130 21:51:32.110637 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" path="/var/lib/kubelet/pods/354f3f63-4e99-4cd3-8523-8388ad02ae4d/volumes" Jan 30 21:51:32 crc kubenswrapper[4721]: I0130 21:51:32.255538 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:32 crc kubenswrapper[4721]: I0130 21:51:32.777114 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerStarted","Data":"2be2a1ff2627fef2464c92f1535ec158f74476f77df9890ecd8efa2a0485b2c6"} Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 21:51:34.799838 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerStarted","Data":"6127aef7adfe9ed11fe6b0f757002112642fcf23a8a3d2d86951dbe79fd05303"} Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 21:51:34.800005 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-central-agent" containerID="cri-o://55650b2c783dfcd3d45861052492f5ce0abb756cbd9ddf5adc75572b1645e026" gracePeriod=30 Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 21:51:34.800264 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="proxy-httpd" containerID="cri-o://6127aef7adfe9ed11fe6b0f757002112642fcf23a8a3d2d86951dbe79fd05303" gracePeriod=30 Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 21:51:34.800282 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="sg-core" containerID="cri-o://2be2a1ff2627fef2464c92f1535ec158f74476f77df9890ecd8efa2a0485b2c6" gracePeriod=30 Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 
21:51:34.800624 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 21:51:34.800344 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-notification-agent" containerID="cri-o://9ad247e4da1a0562b8d2ce6ca55b2d22244af75953635e1219ed85c70d6c6f1e" gracePeriod=30 Jan 30 21:51:34 crc kubenswrapper[4721]: I0130 21:51:34.827813 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.506916229 podStartE2EDuration="5.8277917s" podCreationTimestamp="2026-01-30 21:51:29 +0000 UTC" firstStartedPulling="2026-01-30 21:51:30.010929424 +0000 UTC m=+2078.802830680" lastFinishedPulling="2026-01-30 21:51:34.331804905 +0000 UTC m=+2083.123706151" observedRunningTime="2026-01-30 21:51:34.823852208 +0000 UTC m=+2083.615753444" watchObservedRunningTime="2026-01-30 21:51:34.8277917 +0000 UTC m=+2083.619692946" Jan 30 21:51:35 crc kubenswrapper[4721]: I0130 21:51:35.812777 4721 generic.go:334] "Generic (PLEG): container finished" podID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerID="6127aef7adfe9ed11fe6b0f757002112642fcf23a8a3d2d86951dbe79fd05303" exitCode=0 Jan 30 21:51:35 crc kubenswrapper[4721]: I0130 21:51:35.813052 4721 generic.go:334] "Generic (PLEG): container finished" podID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerID="2be2a1ff2627fef2464c92f1535ec158f74476f77df9890ecd8efa2a0485b2c6" exitCode=2 Jan 30 21:51:35 crc kubenswrapper[4721]: I0130 21:51:35.813062 4721 generic.go:334] "Generic (PLEG): container finished" podID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerID="9ad247e4da1a0562b8d2ce6ca55b2d22244af75953635e1219ed85c70d6c6f1e" exitCode=0 Jan 30 21:51:35 crc kubenswrapper[4721]: I0130 21:51:35.812833 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerDied","Data":"6127aef7adfe9ed11fe6b0f757002112642fcf23a8a3d2d86951dbe79fd05303"} Jan 30 21:51:35 crc kubenswrapper[4721]: I0130 21:51:35.813099 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerDied","Data":"2be2a1ff2627fef2464c92f1535ec158f74476f77df9890ecd8efa2a0485b2c6"} Jan 30 21:51:35 crc kubenswrapper[4721]: I0130 21:51:35.813113 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerDied","Data":"9ad247e4da1a0562b8d2ce6ca55b2d22244af75953635e1219ed85c70d6c6f1e"} Jan 30 21:51:39 crc kubenswrapper[4721]: I0130 21:51:39.853790 4721 generic.go:334] "Generic (PLEG): container finished" podID="33591053-d381-4cec-bd2c-a9ddc8c4778c" containerID="b5dbbe3f5bdbfd64d50612e2745d3b9fc3dc1928748705cee17ba01198bc8c18" exitCode=0 Jan 30 21:51:39 crc kubenswrapper[4721]: I0130 21:51:39.854438 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" event={"ID":"33591053-d381-4cec-bd2c-a9ddc8c4778c","Type":"ContainerDied","Data":"b5dbbe3f5bdbfd64d50612e2745d3b9fc3dc1928748705cee17ba01198bc8c18"} Jan 30 21:51:40 crc kubenswrapper[4721]: I0130 21:51:40.870922 4721 generic.go:334] "Generic (PLEG): container finished" podID="c8efeacc-1a01-4baa-8de8-b553d09a180b" 
containerID="55650b2c783dfcd3d45861052492f5ce0abb756cbd9ddf5adc75572b1645e026" exitCode=0 Jan 30 21:51:40 crc kubenswrapper[4721]: I0130 21:51:40.871582 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerDied","Data":"55650b2c783dfcd3d45861052492f5ce0abb756cbd9ddf5adc75572b1645e026"} Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.090538 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222012 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-scripts\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222140 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-run-httpd\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222203 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-combined-ca-bundle\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222480 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-log-httpd\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsqf9\" (UniqueName: \"kubernetes.io/projected/c8efeacc-1a01-4baa-8de8-b553d09a180b-kube-api-access-vsqf9\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222553 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-sg-core-conf-yaml\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222565 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.222579 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-config-data\") pod \"c8efeacc-1a01-4baa-8de8-b553d09a180b\" (UID: \"c8efeacc-1a01-4baa-8de8-b553d09a180b\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.223040 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.224622 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.224647 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8efeacc-1a01-4baa-8de8-b553d09a180b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.229586 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8efeacc-1a01-4baa-8de8-b553d09a180b-kube-api-access-vsqf9" (OuterVolumeSpecName: "kube-api-access-vsqf9") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "kube-api-access-vsqf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.229836 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-scripts" (OuterVolumeSpecName: "scripts") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.256106 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.319610 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.323357 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.329272 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsqf9\" (UniqueName: \"kubernetes.io/projected/c8efeacc-1a01-4baa-8de8-b553d09a180b-kube-api-access-vsqf9\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.329339 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.329354 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.329366 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.366820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-config-data" (OuterVolumeSpecName: "config-data") pod "c8efeacc-1a01-4baa-8de8-b553d09a180b" (UID: "c8efeacc-1a01-4baa-8de8-b553d09a180b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.431020 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-scripts\") pod \"33591053-d381-4cec-bd2c-a9ddc8c4778c\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.431200 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-combined-ca-bundle\") pod \"33591053-d381-4cec-bd2c-a9ddc8c4778c\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.431336 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2znj\" (UniqueName: \"kubernetes.io/projected/33591053-d381-4cec-bd2c-a9ddc8c4778c-kube-api-access-f2znj\") pod \"33591053-d381-4cec-bd2c-a9ddc8c4778c\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.431864 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-config-data\") pod \"33591053-d381-4cec-bd2c-a9ddc8c4778c\" (UID: \"33591053-d381-4cec-bd2c-a9ddc8c4778c\") " Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.433092 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8efeacc-1a01-4baa-8de8-b553d09a180b-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.435217 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-scripts" (OuterVolumeSpecName: "scripts") pod "33591053-d381-4cec-bd2c-a9ddc8c4778c" (UID: 
"33591053-d381-4cec-bd2c-a9ddc8c4778c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.436229 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33591053-d381-4cec-bd2c-a9ddc8c4778c-kube-api-access-f2znj" (OuterVolumeSpecName: "kube-api-access-f2znj") pod "33591053-d381-4cec-bd2c-a9ddc8c4778c" (UID: "33591053-d381-4cec-bd2c-a9ddc8c4778c"). InnerVolumeSpecName "kube-api-access-f2znj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.456632 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33591053-d381-4cec-bd2c-a9ddc8c4778c" (UID: "33591053-d381-4cec-bd2c-a9ddc8c4778c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.480032 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-config-data" (OuterVolumeSpecName: "config-data") pod "33591053-d381-4cec-bd2c-a9ddc8c4778c" (UID: "33591053-d381-4cec-bd2c-a9ddc8c4778c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.534642 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.534685 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.534699 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2znj\" (UniqueName: \"kubernetes.io/projected/33591053-d381-4cec-bd2c-a9ddc8c4778c-kube-api-access-f2znj\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.534709 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33591053-d381-4cec-bd2c-a9ddc8c4778c-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.885491 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8efeacc-1a01-4baa-8de8-b553d09a180b","Type":"ContainerDied","Data":"01e9c9126f2ef809c9fb32510f551afccde097b88ff4d2430cc4a4755e619821"} Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.885561 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.885580 4721 scope.go:117] "RemoveContainer" containerID="6127aef7adfe9ed11fe6b0f757002112642fcf23a8a3d2d86951dbe79fd05303" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.887764 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" event={"ID":"33591053-d381-4cec-bd2c-a9ddc8c4778c","Type":"ContainerDied","Data":"66b0e9df39d05035bf6f2cb54aecbd1a2ba6afad21d70bdceeb3fad774959efa"} Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.887802 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66b0e9df39d05035bf6f2cb54aecbd1a2ba6afad21d70bdceeb3fad774959efa" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.887818 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f2sbl" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.926588 4721 scope.go:117] "RemoveContainer" containerID="2be2a1ff2627fef2464c92f1535ec158f74476f77df9890ecd8efa2a0485b2c6" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.960378 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.962423 4721 scope.go:117] "RemoveContainer" containerID="9ad247e4da1a0562b8d2ce6ca55b2d22244af75953635e1219ed85c70d6c6f1e" Jan 30 21:51:41 crc kubenswrapper[4721]: I0130 21:51:41.974733 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.029337 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030312 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-notification-agent" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030336 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-notification-agent" Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030354 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-httpd" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030361 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-httpd" Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030385 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-api" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030392 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-api" Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030424 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33591053-d381-4cec-bd2c-a9ddc8c4778c" containerName="nova-cell0-conductor-db-sync" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030430 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="33591053-d381-4cec-bd2c-a9ddc8c4778c" containerName="nova-cell0-conductor-db-sync" Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030454 4721 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-central-agent" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030461 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-central-agent" Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030475 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="sg-core" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030481 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="sg-core" Jan 30 21:51:42 crc kubenswrapper[4721]: E0130 21:51:42.030512 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="proxy-httpd" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030519 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="proxy-httpd" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030944 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="proxy-httpd" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.030983 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="33591053-d381-4cec-bd2c-a9ddc8c4778c" containerName="nova-cell0-conductor-db-sync" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.031000 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-central-agent" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.031015 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-httpd" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.031034 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="ceilometer-notification-agent" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.031050 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" containerName="sg-core" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.031074 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="354f3f63-4e99-4cd3-8523-8388ad02ae4d" containerName="neutron-api" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.046980 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.050927 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.054320 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.086547 4721 scope.go:117] "RemoveContainer" containerID="55650b2c783dfcd3d45861052492f5ce0abb756cbd9ddf5adc75572b1645e026" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.122743 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8efeacc-1a01-4baa-8de8-b553d09a180b" path="/var/lib/kubelet/pods/c8efeacc-1a01-4baa-8de8-b553d09a180b/volumes" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.123592 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.123623 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.124873 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.128052 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.128342 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ppbhh" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.128457 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.157482 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfgzq\" (UniqueName: \"kubernetes.io/projected/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-kube-api-access-dfgzq\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.157814 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-scripts\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.157993 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.158279 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-log-httpd\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.158445 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-run-httpd\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.158769 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-config-data\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.158916 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261200 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-run-httpd\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261256 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-config-data\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261288 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261337 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfgzq\" (UniqueName: \"kubernetes.io/projected/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-kube-api-access-dfgzq\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261384 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-scripts\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261422 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqmd5\" (UniqueName: \"kubernetes.io/projected/9252fb34-c918-431a-9945-7478d05e87d2-kube-api-access-mqmd5\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261448 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261477 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.261605 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-log-httpd\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.262096 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-log-httpd\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.262348 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-run-httpd\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.268503 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.268700 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.269020 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-scripts\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.284475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-config-data\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.285357 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfgzq\" (UniqueName: \"kubernetes.io/projected/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-kube-api-access-dfgzq\") pod \"ceilometer-0\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.363418 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqmd5\" (UniqueName: \"kubernetes.io/projected/9252fb34-c918-431a-9945-7478d05e87d2-kube-api-access-mqmd5\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.363487 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.363742 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.368206 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.368492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.386492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqmd5\" (UniqueName: \"kubernetes.io/projected/9252fb34-c918-431a-9945-7478d05e87d2-kube-api-access-mqmd5\") pod \"nova-cell0-conductor-0\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.388129 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.453621 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.870623 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.898400 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerStarted","Data":"a06440e977cdbdf15dac58b9105b711e353068c50f6e40aac35785be2efc772d"} Jan 30 21:51:42 crc kubenswrapper[4721]: I0130 21:51:42.971164 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:42 crc kubenswrapper[4721]: W0130 21:51:42.975815 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9252fb34_c918_431a_9945_7478d05e87d2.slice/crio-2e3a1bffddd6190e34dd3b0b4e34590afbbc75879a3135f56f188d81364aa6b0 WatchSource:0}: Error finding container 2e3a1bffddd6190e34dd3b0b4e34590afbbc75879a3135f56f188d81364aa6b0: Status 404 returned error can't find the container with id 2e3a1bffddd6190e34dd3b0b4e34590afbbc75879a3135f56f188d81364aa6b0 Jan 30 21:51:43 crc kubenswrapper[4721]: I0130 21:51:43.916354 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9252fb34-c918-431a-9945-7478d05e87d2","Type":"ContainerStarted","Data":"a72b0a39c60c083c67a3f8f371784b548699f092d99cf9d7c5b42735394fd277"} Jan 30 21:51:43 crc kubenswrapper[4721]: I0130 21:51:43.916741 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9252fb34-c918-431a-9945-7478d05e87d2","Type":"ContainerStarted","Data":"2e3a1bffddd6190e34dd3b0b4e34590afbbc75879a3135f56f188d81364aa6b0"} Jan 30 21:51:43 crc kubenswrapper[4721]: I0130 21:51:43.917550 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:43 crc kubenswrapper[4721]: I0130 21:51:43.919799 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerStarted","Data":"83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a"} Jan 30 21:51:43 crc kubenswrapper[4721]: I0130 21:51:43.949066 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.949047507 podStartE2EDuration="1.949047507s" podCreationTimestamp="2026-01-30 21:51:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:43.938688855 +0000 UTC m=+2092.730590101" watchObservedRunningTime="2026-01-30 21:51:43.949047507 +0000 UTC m=+2092.740948753" Jan 30 21:51:44 crc kubenswrapper[4721]: I0130 21:51:44.936666 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerStarted","Data":"06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7"} Jan 30 21:51:44 crc kubenswrapper[4721]: I0130 21:51:44.936983 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerStarted","Data":"aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0"} Jan 30 21:51:46 crc kubenswrapper[4721]: I0130 21:51:46.010120 4721 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:46 crc kubenswrapper[4721]: I0130 21:51:46.010630 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="9252fb34-c918-431a-9945-7478d05e87d2" containerName="nova-cell0-conductor-conductor" containerID="cri-o://a72b0a39c60c083c67a3f8f371784b548699f092d99cf9d7c5b42735394fd277" gracePeriod=30 Jan 30 21:51:46 crc kubenswrapper[4721]: I0130 21:51:46.984083 4721 generic.go:334] "Generic (PLEG): container finished" podID="9252fb34-c918-431a-9945-7478d05e87d2" containerID="a72b0a39c60c083c67a3f8f371784b548699f092d99cf9d7c5b42735394fd277" exitCode=0 Jan 30 21:51:46 crc kubenswrapper[4721]: I0130 21:51:46.984129 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9252fb34-c918-431a-9945-7478d05e87d2","Type":"ContainerDied","Data":"a72b0a39c60c083c67a3f8f371784b548699f092d99cf9d7c5b42735394fd277"} Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.230211 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.391734 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqmd5\" (UniqueName: \"kubernetes.io/projected/9252fb34-c918-431a-9945-7478d05e87d2-kube-api-access-mqmd5\") pod \"9252fb34-c918-431a-9945-7478d05e87d2\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.392274 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-config-data\") pod \"9252fb34-c918-431a-9945-7478d05e87d2\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.392448 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-combined-ca-bundle\") pod \"9252fb34-c918-431a-9945-7478d05e87d2\" (UID: \"9252fb34-c918-431a-9945-7478d05e87d2\") " Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.403529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9252fb34-c918-431a-9945-7478d05e87d2-kube-api-access-mqmd5" (OuterVolumeSpecName: "kube-api-access-mqmd5") pod "9252fb34-c918-431a-9945-7478d05e87d2" (UID: "9252fb34-c918-431a-9945-7478d05e87d2"). InnerVolumeSpecName "kube-api-access-mqmd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.428599 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9252fb34-c918-431a-9945-7478d05e87d2" (UID: "9252fb34-c918-431a-9945-7478d05e87d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.432051 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-config-data" (OuterVolumeSpecName: "config-data") pod "9252fb34-c918-431a-9945-7478d05e87d2" (UID: "9252fb34-c918-431a-9945-7478d05e87d2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.495375 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqmd5\" (UniqueName: \"kubernetes.io/projected/9252fb34-c918-431a-9945-7478d05e87d2-kube-api-access-mqmd5\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.495423 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.495448 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9252fb34-c918-431a-9945-7478d05e87d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.998252 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerStarted","Data":"2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1"} Jan 30 21:51:47 crc kubenswrapper[4721]: I0130 21:51:47.998437 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.000366 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9252fb34-c918-431a-9945-7478d05e87d2","Type":"ContainerDied","Data":"2e3a1bffddd6190e34dd3b0b4e34590afbbc75879a3135f56f188d81364aa6b0"} Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.000406 4721 scope.go:117] "RemoveContainer" containerID="a72b0a39c60c083c67a3f8f371784b548699f092d99cf9d7c5b42735394fd277" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.000504 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.024251 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.856475226 podStartE2EDuration="7.024225787s" podCreationTimestamp="2026-01-30 21:51:41 +0000 UTC" firstStartedPulling="2026-01-30 21:51:42.875491046 +0000 UTC m=+2091.667392282" lastFinishedPulling="2026-01-30 21:51:47.043241597 +0000 UTC m=+2095.835142843" observedRunningTime="2026-01-30 21:51:48.018118417 +0000 UTC m=+2096.810019663" watchObservedRunningTime="2026-01-30 21:51:48.024225787 +0000 UTC m=+2096.816127043" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.063280 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.078715 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.090647 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.104156 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9252fb34-c918-431a-9945-7478d05e87d2" path="/var/lib/kubelet/pods/9252fb34-c918-431a-9945-7478d05e87d2/volumes" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.119351 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:48 crc kubenswrapper[4721]: E0130 21:51:48.119960 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9252fb34-c918-431a-9945-7478d05e87d2" containerName="nova-cell0-conductor-conductor" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.119987 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9252fb34-c918-431a-9945-7478d05e87d2" containerName="nova-cell0-conductor-conductor" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.120254 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9252fb34-c918-431a-9945-7478d05e87d2" containerName="nova-cell0-conductor-conductor" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.121382 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.127161 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.127515 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ppbhh" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.138676 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.313697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.314190 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.314223 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57zkv\" (UniqueName: \"kubernetes.io/projected/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-kube-api-access-57zkv\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.415823 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.415893 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57zkv\" (UniqueName: \"kubernetes.io/projected/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-kube-api-access-57zkv\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.415993 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.422229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.424986 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.434215 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57zkv\" (UniqueName: \"kubernetes.io/projected/c01ff72c-19ac-4ae7-8c07-d9e0c01c669f-kube-api-access-57zkv\") pod \"nova-cell0-conductor-0\" (UID: \"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f\") " pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.439964 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:48 crc kubenswrapper[4721]: I0130 21:51:48.965921 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 30 21:51:49 crc kubenswrapper[4721]: I0130 21:51:49.026490 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f","Type":"ContainerStarted","Data":"4d04a5ae3d7886e01741a0c0102cf98b26b672855c6879bdaeb91d2cc73952b3"} Jan 30 21:51:50 crc kubenswrapper[4721]: I0130 21:51:50.044504 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c01ff72c-19ac-4ae7-8c07-d9e0c01c669f","Type":"ContainerStarted","Data":"6f33ef7bec2caaa166e6ba382b1249746636f0e57c62c26d8c6bcdcae44212ed"} Jan 30 21:51:50 crc kubenswrapper[4721]: I0130 21:51:50.044731 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-central-agent" containerID="cri-o://83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a" gracePeriod=30 Jan 30 21:51:50 crc kubenswrapper[4721]: I0130 21:51:50.045037 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-notification-agent" containerID="cri-o://aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0" gracePeriod=30 Jan 30 21:51:50 crc kubenswrapper[4721]: I0130 21:51:50.044956 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="sg-core" containerID="cri-o://06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7" gracePeriod=30 Jan 30 21:51:50 crc kubenswrapper[4721]: I0130 21:51:50.045104 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="proxy-httpd" containerID="cri-o://2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1" gracePeriod=30 Jan 30 21:51:50 crc kubenswrapper[4721]: I0130 21:51:50.072510 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.072492832 podStartE2EDuration="2.072492832s" podCreationTimestamp="2026-01-30 21:51:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:51:50.067922451 +0000 UTC m=+2098.859823697" watchObservedRunningTime="2026-01-30 21:51:50.072492832 +0000 UTC m=+2098.864394078" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.059705 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" 
containerID="2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1" exitCode=0 Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.060081 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerID="06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7" exitCode=2 Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.060096 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerID="aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0" exitCode=0 Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.059809 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerDied","Data":"2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1"} Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.060231 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerDied","Data":"06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7"} Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.060252 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerDied","Data":"aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0"} Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.060368 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.766152 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.803997 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-sg-core-conf-yaml\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.804084 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-log-httpd\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.804115 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-run-httpd\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.804175 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-scripts\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.804198 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfgzq\" (UniqueName: \"kubernetes.io/projected/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-kube-api-access-dfgzq\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.804281 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-config-data\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.804346 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-combined-ca-bundle\") pod \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\" (UID: \"8ee945cc-6101-4aa6-9a63-20b0aac14ad9\") " Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.805008 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.805096 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.824755 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-scripts" (OuterVolumeSpecName: "scripts") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.824790 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-kube-api-access-dfgzq" (OuterVolumeSpecName: "kube-api-access-dfgzq") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "kube-api-access-dfgzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.839418 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.889610 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.907088 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.907721 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfgzq\" (UniqueName: \"kubernetes.io/projected/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-kube-api-access-dfgzq\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.907740 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.907749 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.907757 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.907766 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:51 crc kubenswrapper[4721]: I0130 21:51:51.924539 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-config-data" (OuterVolumeSpecName: "config-data") pod "8ee945cc-6101-4aa6-9a63-20b0aac14ad9" (UID: "8ee945cc-6101-4aa6-9a63-20b0aac14ad9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.016017 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ee945cc-6101-4aa6-9a63-20b0aac14ad9-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.078886 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerID="83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a" exitCode=0 Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.079020 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.079467 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerDied","Data":"83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a"} Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.079513 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ee945cc-6101-4aa6-9a63-20b0aac14ad9","Type":"ContainerDied","Data":"a06440e977cdbdf15dac58b9105b711e353068c50f6e40aac35785be2efc772d"} Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.079533 4721 scope.go:117] "RemoveContainer" containerID="2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.100699 4721 scope.go:117] "RemoveContainer" containerID="06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.128441 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.129532 4721 scope.go:117] "RemoveContainer" containerID="aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.141700 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.155399 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.155835 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-notification-agent" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.155849 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-notification-agent" Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.155860 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="sg-core" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.155866 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="sg-core" Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.155890 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" 
containerName="ceilometer-central-agent" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.155896 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-central-agent" Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.155909 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="proxy-httpd" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.155915 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="proxy-httpd" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.156106 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-central-agent" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.156118 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="sg-core" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.156133 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="ceilometer-notification-agent" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.156144 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" containerName="proxy-httpd" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.157960 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.160933 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.161878 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.165555 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.165621 4721 scope.go:117] "RemoveContainer" containerID="83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.195278 4721 scope.go:117] "RemoveContainer" containerID="2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1" Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.195655 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1\": container with ID starting with 2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1 not found: ID does not exist" containerID="2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1" Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.195685 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1"} err="failed to get container status \"2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1\": rpc error: code = NotFound desc = could not find container \"2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1\": container with ID starting with 2daf940c5e6ee87cad3560374a6ca72927a2a77ff32650d136b7b3ea435dbdb1 not found: ID does not exist" Jan 30 21:51:52 crc 
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.195710 4721 scope.go:117] "RemoveContainer" containerID="06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7"
Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.196727 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7\": container with ID starting with 06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7 not found: ID does not exist" containerID="06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.196753 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7"} err="failed to get container status \"06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7\": rpc error: code = NotFound desc = could not find container \"06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7\": container with ID starting with 06d3b362ffc572061ae2bf7b96f4a7c7fce65e9a07d4d7424dc15ff3ffbe9ea7 not found: ID does not exist"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.196766 4721 scope.go:117] "RemoveContainer" containerID="aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0"
Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.197078 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0\": container with ID starting with aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0 not found: ID does not exist" containerID="aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.197137 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0"} err="failed to get container status \"aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0\": rpc error: code = NotFound desc = could not find container \"aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0\": container with ID starting with aad61fb85e895532ae7c84b84075df42b6767aeda312156719e686c2300bddc0 not found: ID does not exist"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.197172 4721 scope.go:117] "RemoveContainer" containerID="83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a"
Jan 30 21:51:52 crc kubenswrapper[4721]: E0130 21:51:52.197453 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a\": container with ID starting with 83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a not found: ID does not exist" containerID="83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.197486 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a"} err="failed to get container status \"83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a\": rpc error: code = NotFound desc = could not find container \"83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a\": container with ID starting with 83dbf74a3a08953bba0e2bb7c665a4a17707d20dbe4ee048a0b3ffe834a0d17a not found: ID does not exist"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.220449 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-run-httpd\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.220517 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-log-httpd\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.220581 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-config-data\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.220751 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pslv9\" (UniqueName: \"kubernetes.io/projected/247013ba-8786-402c-b3aa-30113e5001b4-kube-api-access-pslv9\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.220779 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.220833 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.221029 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-scripts\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.322877 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pslv9\" (UniqueName: \"kubernetes.io/projected/247013ba-8786-402c-b3aa-30113e5001b4-kube-api-access-pslv9\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.322936 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.322957 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.322998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-scripts\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.323029 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-run-httpd\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.323061 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-log-httpd\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.323084 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-config-data\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.324394 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-run-httpd\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.324714 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-log-httpd\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.332939 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.333094 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-scripts\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.337042 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.342750 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-config-data\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.349046 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pslv9\" (UniqueName: \"kubernetes.io/projected/247013ba-8786-402c-b3aa-30113e5001b4-kube-api-access-pslv9\") pod \"ceilometer-0\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " pod="openstack/ceilometer-0"
Jan 30 21:51:52 crc kubenswrapper[4721]: I0130 21:51:52.498496 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:51:53 crc kubenswrapper[4721]: I0130 21:51:53.026805 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:51:53 crc kubenswrapper[4721]: W0130 21:51:53.032202 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod247013ba_8786_402c_b3aa_30113e5001b4.slice/crio-29833a64dfc8afbec9c8838448238e924cfcee57bc76452dc05327b65c8cfe77 WatchSource:0}: Error finding container 29833a64dfc8afbec9c8838448238e924cfcee57bc76452dc05327b65c8cfe77: Status 404 returned error can't find the container with id 29833a64dfc8afbec9c8838448238e924cfcee57bc76452dc05327b65c8cfe77
Jan 30 21:51:53 crc kubenswrapper[4721]: I0130 21:51:53.092256 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerStarted","Data":"29833a64dfc8afbec9c8838448238e924cfcee57bc76452dc05327b65c8cfe77"}
Jan 30 21:51:54 crc kubenswrapper[4721]: I0130 21:51:54.107762 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ee945cc-6101-4aa6-9a63-20b0aac14ad9" path="/var/lib/kubelet/pods/8ee945cc-6101-4aa6-9a63-20b0aac14ad9/volumes"
Jan 30 21:51:55 crc kubenswrapper[4721]: I0130 21:51:55.114648 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerStarted","Data":"29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7"}
Jan 30 21:51:56 crc kubenswrapper[4721]: I0130 21:51:56.128594 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerStarted","Data":"80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3"}
Jan 30 21:51:56 crc kubenswrapper[4721]: I0130 21:51:56.128942 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerStarted","Data":"7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85"}
Jan 30 21:51:58 crc kubenswrapper[4721]: I0130 21:51:58.477892 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Jan 30 21:51:58 crc kubenswrapper[4721]: I0130 21:51:58.953173 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-hs42m"]
Jan 30 21:51:58 crc kubenswrapper[4721]: I0130 21:51:58.954947 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:58 crc kubenswrapper[4721]: I0130 21:51:58.958383 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Jan 30 21:51:58 crc kubenswrapper[4721]: I0130 21:51:58.958578 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Jan 30 21:51:58 crc kubenswrapper[4721]: I0130 21:51:58.967085 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hs42m"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.033492 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.033579 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-config-data\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.033606 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmgmd\" (UniqueName: \"kubernetes.io/projected/8c66f646-071a-42e8-b551-18c8fd4c6df4-kube-api-access-zmgmd\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.033704 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-scripts\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.107518 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.109203 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.116868 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.136981 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-scripts\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.137180 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.137281 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-config-data\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.137346 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmgmd\" (UniqueName: \"kubernetes.io/projected/8c66f646-071a-42e8-b551-18c8fd4c6df4-kube-api-access-zmgmd\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.152824 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-config-data\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.153956 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-scripts\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.157078 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.160420 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.201046 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmgmd\" (UniqueName: \"kubernetes.io/projected/8c66f646-071a-42e8-b551-18c8fd4c6df4-kube-api-access-zmgmd\") pod \"nova-cell0-cell-mapping-hs42m\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") " pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.205211 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerStarted","Data":"4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122"}
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.206399 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.288526 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hs42m"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.292423 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.293253 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.293367 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snw6r\" (UniqueName: \"kubernetes.io/projected/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-kube-api-access-snw6r\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.293422 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-config-data\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.293453 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-logs\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.294016 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.304800 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.331456 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.348880 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.243323859 podStartE2EDuration="7.348856459s" podCreationTimestamp="2026-01-30 21:51:52 +0000 UTC" firstStartedPulling="2026-01-30 21:51:53.035674969 +0000 UTC m=+2101.827576215" lastFinishedPulling="2026-01-30 21:51:58.141207569 +0000 UTC m=+2106.933108815" observedRunningTime="2026-01-30 21:51:59.236744272 +0000 UTC m=+2108.028645518" watchObservedRunningTime="2026-01-30 21:51:59.348856459 +0000 UTC m=+2108.140757705"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.398655 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.423836 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snw6r\" (UniqueName: \"kubernetes.io/projected/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-kube-api-access-snw6r\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.423889 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.423924 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-config-data\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.424061 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-config-data\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.424113 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-logs\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.424661 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmlhg\" (UniqueName: \"kubernetes.io/projected/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-kube-api-access-cmlhg\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
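The pod_startup_latency_tracker entry for ceilometer-0 above makes its own arithmetic checkable: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that value minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Re-deriving the logged 7.348856459s and 2.243323859s from the logged timestamps:

package main

import (
	"fmt"
	"time"
)

// Timestamps copied from the tracker entry for ceilometer-0 above.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-01-30 21:51:52 +0000 UTC")
	firstPull := mustParse("2026-01-30 21:51:53.035674969 +0000 UTC")
	lastPull := mustParse("2026-01-30 21:51:58.141207569 +0000 UTC")
	running := mustParse("2026-01-30 21:51:59.348856459 +0000 UTC")

	e2e := running.Sub(created)          // 7.348856459s == podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 2.243323859s == podStartSLOduration
	fmt.Println(e2e, slo)
}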
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.413724 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.417184 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.426950 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.429763 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-logs\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.440905 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.448214 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.450444 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-config-data\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.470891 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snw6r\" (UniqueName: \"kubernetes.io/projected/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-kube-api-access-snw6r\") pod \"nova-api-0\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.525921 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zrv56"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.527782 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmlhg\" (UniqueName: \"kubernetes.io/projected/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-kube-api-access-cmlhg\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.527885 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.527915 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-config-data\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.534507 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-config-data\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.544718 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.545249 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.548758 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.550058 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.554788 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.567108 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmlhg\" (UniqueName: \"kubernetes.io/projected/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-kube-api-access-cmlhg\") pod \"nova-scheduler-0\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.613716 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zrv56"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.629687 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-config-data\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.629809 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/246e98c4-6bbe-44ef-8d3d-6257c525dad1-logs\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.629842 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.629890 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcrw9\" (UniqueName: \"kubernetes.io/projected/246e98c4-6bbe-44ef-8d3d-6257c525dad1-kube-api-access-wcrw9\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.666364 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.667026 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731577 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731737 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb4pb\" (UniqueName: \"kubernetes.io/projected/617fc783-10bd-4dce-b2ff-3d10bf6451a7-kube-api-access-mb4pb\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731799 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-config\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731846 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/246e98c4-6bbe-44ef-8d3d-6257c525dad1-logs\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731875 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731890 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731933 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcrw9\" (UniqueName: \"kubernetes.io/projected/246e98c4-6bbe-44ef-8d3d-6257c525dad1-kube-api-access-wcrw9\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.731967 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qmr9\" (UniqueName: \"kubernetes.io/projected/ced291de-0920-46ed-a3e0-2c064b072df0-kube-api-access-5qmr9\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.732075 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-svc\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.732154 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.732187 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-config-data\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.732216 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.732238 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.755072 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/246e98c4-6bbe-44ef-8d3d-6257c525dad1-logs\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.760374 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.783378 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-config-data\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.817165 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcrw9\" (UniqueName: \"kubernetes.io/projected/246e98c4-6bbe-44ef-8d3d-6257c525dad1-kube-api-access-wcrw9\") pod \"nova-metadata-0\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") " pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838618 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qmr9\" (UniqueName: \"kubernetes.io/projected/ced291de-0920-46ed-a3e0-2c064b072df0-kube-api-access-5qmr9\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838712 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-svc\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838814 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838835 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838865 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838887 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb4pb\" (UniqueName: \"kubernetes.io/projected/617fc783-10bd-4dce-b2ff-3d10bf6451a7-kube-api-access-mb4pb\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838922 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-config\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.838971 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.840520 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.841058 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.841647 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-svc\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.848003 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-config\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.849890 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.857087 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.901959 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.903167 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.933769 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.938667 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qmr9\" (UniqueName: \"kubernetes.io/projected/ced291de-0920-46ed-a3e0-2c064b072df0-kube-api-access-5qmr9\") pod \"dnsmasq-dns-78cd565959-zrv56\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.939195 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb4pb\" (UniqueName: \"kubernetes.io/projected/617fc783-10bd-4dce-b2ff-3d10bf6451a7-kube-api-access-mb4pb\") pod \"nova-cell1-novncproxy-0\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:51:59 crc kubenswrapper[4721]: I0130 21:51:59.957278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.054446 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.336460 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hs42m"]
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.532049 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.624935 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.737723 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-56v9p"]
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.740491 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.743794 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.743809 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.762749 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-56v9p"]
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.790395 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zrv56"]
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.801037 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 30 21:52:00 crc kubenswrapper[4721]: W0130 21:52:00.814890 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97762a1c_ffe6_48d6_ba91_57dabd31b5b8.slice/crio-edc10525b1f93d8ae3843833b1b3deaf57cdaaccc552ae2875ffe15db78f1241 WatchSource:0}: Error finding container edc10525b1f93d8ae3843833b1b3deaf57cdaaccc552ae2875ffe15db78f1241: Status 404 returned error can't find the container with id edc10525b1f93d8ae3843833b1b3deaf57cdaaccc552ae2875ffe15db78f1241
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.877794 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlwqq\" (UniqueName: \"kubernetes.io/projected/f5db7a32-67d1-48b4-802f-0711a9e32eb2-kube-api-access-tlwqq\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.877885 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-config-data\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.878016 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.878036 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-scripts\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
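The ADD/UPDATE/DELETE/REMOVE lines, the (PLEG) lines, and the (probe) lines interleaved above are all arms of one sync loop: the kubelet serializes pod changes from the API server, container events from the runtime, and probe transitions into per-pod syncs. A schematic select loop in that shape, with toy channels and syncPod rather than kubelet's types:

package main

import "fmt"

type plegEvent struct{ podID, containerID string }

// Schematic only: one select loop serializes the three sources the log shows
// interleaved ("SyncLoop ADD/UPDATE/DELETE" from the API, "SyncLoop (PLEG)"
// from the runtime, "SyncLoop (probe)" transitions).
func syncLoop(api <-chan string, pleg <-chan plegEvent, probes <-chan string, syncPod func(string)) {
	for {
		select {
		case pod, ok := <-api:
			if !ok {
				return // API feed closed: stop the loop
			}
			syncPod(pod)
		case ev := <-pleg:
			syncPod(ev.podID)
		case pod := <-probes:
			syncPod(pod)
		}
	}
}

func main() {
	api := make(chan string, 2)
	api <- "openstack/nova-cell1-conductor-db-sync-56v9p"
	api <- "openstack/nova-api-0"
	close(api)
	syncLoop(api, nil, nil, func(p string) { fmt.Println("sync", p) })
}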
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.979749 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-config-data\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.979954 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.979991 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-scripts\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.980042 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlwqq\" (UniqueName: \"kubernetes.io/projected/f5db7a32-67d1-48b4-802f-0711a9e32eb2-kube-api-access-tlwqq\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.991183 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-scripts\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.993131 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:00 crc kubenswrapper[4721]: I0130 21:52:00.999070 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-config-data\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.004517 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlwqq\" (UniqueName: \"kubernetes.io/projected/f5db7a32-67d1-48b4-802f-0711a9e32eb2-kube-api-access-tlwqq\") pod \"nova-cell1-conductor-db-sync-56v9p\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " pod="openstack/nova-cell1-conductor-db-sync-56v9p"
21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.054687 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.070858 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-56v9p" Jan 30 21:52:01 crc kubenswrapper[4721]: W0130 21:52:01.088441 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod617fc783_10bd_4dce_b2ff_3d10bf6451a7.slice/crio-44cff1c51468ab208a110663c1b6207c100a7ce1f5f809f1243b1f1bdbaf0d63 WatchSource:0}: Error finding container 44cff1c51468ab208a110663c1b6207c100a7ce1f5f809f1243b1f1bdbaf0d63: Status 404 returned error can't find the container with id 44cff1c51468ab208a110663c1b6207c100a7ce1f5f809f1243b1f1bdbaf0d63 Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.260069 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ac830b68-ee1f-4f4f-9906-8d26f9fd0534","Type":"ContainerStarted","Data":"4b1a8fa870dc0e131f59ff9395ad1b8da2749f7a5aeaf45d9a245bf7293dd8e5"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.284912 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"97762a1c-ffe6-48d6-ba91-57dabd31b5b8","Type":"ContainerStarted","Data":"edc10525b1f93d8ae3843833b1b3deaf57cdaaccc552ae2875ffe15db78f1241"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.288995 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hs42m" event={"ID":"8c66f646-071a-42e8-b551-18c8fd4c6df4","Type":"ContainerStarted","Data":"055cbf36022f062ac3abda5376468704eef07db15d7f11d71db669ef8299a6d3"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.289050 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hs42m" event={"ID":"8c66f646-071a-42e8-b551-18c8fd4c6df4","Type":"ContainerStarted","Data":"913f0861eed8d3a19e549d7ae53aa1ea598c74eb2002cf75105cfa62ac7e72b5"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.294580 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zrv56" event={"ID":"ced291de-0920-46ed-a3e0-2c064b072df0","Type":"ContainerStarted","Data":"6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.294665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zrv56" event={"ID":"ced291de-0920-46ed-a3e0-2c064b072df0","Type":"ContainerStarted","Data":"86f9ea9e98edfe17cc068e9209fc47c5f1e39a61e14b7076e6b67b48769b23fa"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.304070 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"617fc783-10bd-4dce-b2ff-3d10bf6451a7","Type":"ContainerStarted","Data":"44cff1c51468ab208a110663c1b6207c100a7ce1f5f809f1243b1f1bdbaf0d63"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.314809 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"246e98c4-6bbe-44ef-8d3d-6257c525dad1","Type":"ContainerStarted","Data":"ecada508e8b4f914616ab4f92de5c56f98da32e202facf6679b8017c0bd6ff79"} Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.316698 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-hs42m" 
Jan 30 21:52:01 crc kubenswrapper[4721]: I0130 21:52:01.660611 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-56v9p"]
Jan 30 21:52:01 crc kubenswrapper[4721]: W0130 21:52:01.689171 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5db7a32_67d1_48b4_802f_0711a9e32eb2.slice/crio-cd01054e93cb5abf96002acfaa52554db0784e0d7bef0f910029040ba283ae35 WatchSource:0}: Error finding container cd01054e93cb5abf96002acfaa52554db0784e0d7bef0f910029040ba283ae35: Status 404 returned error can't find the container with id cd01054e93cb5abf96002acfaa52554db0784e0d7bef0f910029040ba283ae35
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.360867 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-56v9p" event={"ID":"f5db7a32-67d1-48b4-802f-0711a9e32eb2","Type":"ContainerStarted","Data":"e618f6d2c31f7c68ad4e3e1479d2259b6e7ed9c9883e137e3960be8c8e935b4d"}
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.361323 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-56v9p" event={"ID":"f5db7a32-67d1-48b4-802f-0711a9e32eb2","Type":"ContainerStarted","Data":"cd01054e93cb5abf96002acfaa52554db0784e0d7bef0f910029040ba283ae35"}
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.364946 4721 generic.go:334] "Generic (PLEG): container finished" podID="ced291de-0920-46ed-a3e0-2c064b072df0" containerID="6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b" exitCode=0
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.366128 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zrv56" event={"ID":"ced291de-0920-46ed-a3e0-2c064b072df0","Type":"ContainerDied","Data":"6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b"}
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.366174 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78cd565959-zrv56"
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.366189 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zrv56" event={"ID":"ced291de-0920-46ed-a3e0-2c064b072df0","Type":"ContainerStarted","Data":"8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861"}
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.410501 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-56v9p" podStartSLOduration=2.410480369 podStartE2EDuration="2.410480369s" podCreationTimestamp="2026-01-30 21:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:02.394460973 +0000 UTC m=+2111.186362219" watchObservedRunningTime="2026-01-30 21:52:02.410480369 +0000 UTC m=+2111.202381615"
Jan 30 21:52:02 crc kubenswrapper[4721]: I0130 21:52:02.460701 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78cd565959-zrv56" podStartSLOduration=3.460677336 podStartE2EDuration="3.460677336s" podCreationTimestamp="2026-01-30 21:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:02.422646807 +0000 UTC m=+2111.214548053" watchObservedRunningTime="2026-01-30 21:52:02.460677336 +0000 UTC m=+2111.252578582"
pod="openstack/dnsmasq-dns-78cd565959-zrv56" podStartSLOduration=3.460677336 podStartE2EDuration="3.460677336s" podCreationTimestamp="2026-01-30 21:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:02.422646807 +0000 UTC m=+2111.214548053" watchObservedRunningTime="2026-01-30 21:52:02.460677336 +0000 UTC m=+2111.252578582" Jan 30 21:52:03 crc kubenswrapper[4721]: I0130 21:52:03.275901 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:03 crc kubenswrapper[4721]: I0130 21:52:03.286461 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.427506 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"246e98c4-6bbe-44ef-8d3d-6257c525dad1","Type":"ContainerStarted","Data":"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade"} Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.428048 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"246e98c4-6bbe-44ef-8d3d-6257c525dad1","Type":"ContainerStarted","Data":"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"} Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.427697 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-log" containerID="cri-o://875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4" gracePeriod=30 Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.428208 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-metadata" containerID="cri-o://65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade" gracePeriod=30 Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.430907 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ac830b68-ee1f-4f4f-9906-8d26f9fd0534","Type":"ContainerStarted","Data":"996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64"} Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.455219 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"97762a1c-ffe6-48d6-ba91-57dabd31b5b8","Type":"ContainerStarted","Data":"103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307"} Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.455280 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"97762a1c-ffe6-48d6-ba91-57dabd31b5b8","Type":"ContainerStarted","Data":"32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3"} Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.485578 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.979232672 podStartE2EDuration="7.485558785s" podCreationTimestamp="2026-01-30 21:51:59 +0000 UTC" firstStartedPulling="2026-01-30 21:52:00.680632011 +0000 UTC m=+2109.472533257" lastFinishedPulling="2026-01-30 21:52:05.186958124 +0000 UTC m=+2113.978859370" observedRunningTime="2026-01-30 21:52:06.468917329 +0000 UTC m=+2115.260818575" watchObservedRunningTime="2026-01-30 21:52:06.485558785 +0000 UTC 
Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.486041 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"617fc783-10bd-4dce-b2ff-3d10bf6451a7","Type":"ContainerStarted","Data":"232bda3c110a7fd950109400b3a3c951da38b7594eefff4ba1a383e7c3f2ec64"}
Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.486265 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="617fc783-10bd-4dce-b2ff-3d10bf6451a7" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://232bda3c110a7fd950109400b3a3c951da38b7594eefff4ba1a383e7c3f2ec64" gracePeriod=30
Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.498066 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.131761173 podStartE2EDuration="7.498022602s" podCreationTimestamp="2026-01-30 21:51:59 +0000 UTC" firstStartedPulling="2026-01-30 21:52:00.818681472 +0000 UTC m=+2109.610582718" lastFinishedPulling="2026-01-30 21:52:05.184942901 +0000 UTC m=+2113.976844147" observedRunningTime="2026-01-30 21:52:06.486811305 +0000 UTC m=+2115.278712581" watchObservedRunningTime="2026-01-30 21:52:06.498022602 +0000 UTC m=+2115.289923848"
Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.533636 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.8814777 podStartE2EDuration="7.533575915s" podCreationTimestamp="2026-01-30 21:51:59 +0000 UTC" firstStartedPulling="2026-01-30 21:52:00.534857299 +0000 UTC m=+2109.326758545" lastFinishedPulling="2026-01-30 21:52:05.186955504 +0000 UTC m=+2113.978856760" observedRunningTime="2026-01-30 21:52:06.502019366 +0000 UTC m=+2115.293920612" watchObservedRunningTime="2026-01-30 21:52:06.533575915 +0000 UTC m=+2115.325477181"
Jan 30 21:52:06 crc kubenswrapper[4721]: I0130 21:52:06.540255 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.459248752 podStartE2EDuration="7.540240322s" podCreationTimestamp="2026-01-30 21:51:59 +0000 UTC" firstStartedPulling="2026-01-30 21:52:01.106946884 +0000 UTC m=+2109.898848130" lastFinishedPulling="2026-01-30 21:52:05.187938454 +0000 UTC m=+2113.979839700" observedRunningTime="2026-01-30 21:52:06.516723442 +0000 UTC m=+2115.308624688" watchObservedRunningTime="2026-01-30 21:52:06.540240322 +0000 UTC m=+2115.332141568"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.219882 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.256226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-combined-ca-bundle\") pod \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") "
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.256369 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/246e98c4-6bbe-44ef-8d3d-6257c525dad1-logs\") pod \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") "
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.256407 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcrw9\" (UniqueName: \"kubernetes.io/projected/246e98c4-6bbe-44ef-8d3d-6257c525dad1-kube-api-access-wcrw9\") pod \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") "
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.256476 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-config-data\") pod \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\" (UID: \"246e98c4-6bbe-44ef-8d3d-6257c525dad1\") "
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.256787 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/246e98c4-6bbe-44ef-8d3d-6257c525dad1-logs" (OuterVolumeSpecName: "logs") pod "246e98c4-6bbe-44ef-8d3d-6257c525dad1" (UID: "246e98c4-6bbe-44ef-8d3d-6257c525dad1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.257580 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/246e98c4-6bbe-44ef-8d3d-6257c525dad1-logs\") on node \"crc\" DevicePath \"\""
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.276704 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/246e98c4-6bbe-44ef-8d3d-6257c525dad1-kube-api-access-wcrw9" (OuterVolumeSpecName: "kube-api-access-wcrw9") pod "246e98c4-6bbe-44ef-8d3d-6257c525dad1" (UID: "246e98c4-6bbe-44ef-8d3d-6257c525dad1"). InnerVolumeSpecName "kube-api-access-wcrw9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.330557 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "246e98c4-6bbe-44ef-8d3d-6257c525dad1" (UID: "246e98c4-6bbe-44ef-8d3d-6257c525dad1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.339430 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-config-data" (OuterVolumeSpecName: "config-data") pod "246e98c4-6bbe-44ef-8d3d-6257c525dad1" (UID: "246e98c4-6bbe-44ef-8d3d-6257c525dad1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.359782 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.359815 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcrw9\" (UniqueName: \"kubernetes.io/projected/246e98c4-6bbe-44ef-8d3d-6257c525dad1-kube-api-access-wcrw9\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.359827 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246e98c4-6bbe-44ef-8d3d-6257c525dad1-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498097 4721 generic.go:334] "Generic (PLEG): container finished" podID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerID="65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade" exitCode=0 Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498140 4721 generic.go:334] "Generic (PLEG): container finished" podID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerID="875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4" exitCode=143 Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498188 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498265 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"246e98c4-6bbe-44ef-8d3d-6257c525dad1","Type":"ContainerDied","Data":"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade"} Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498308 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"246e98c4-6bbe-44ef-8d3d-6257c525dad1","Type":"ContainerDied","Data":"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"} Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498322 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"246e98c4-6bbe-44ef-8d3d-6257c525dad1","Type":"ContainerDied","Data":"ecada508e8b4f914616ab4f92de5c56f98da32e202facf6679b8017c0bd6ff79"} Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.498337 4721 scope.go:117] "RemoveContainer" containerID="65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.522152 4721 scope.go:117] "RemoveContainer" containerID="875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.547048 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.566594 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.576035 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:07 crc kubenswrapper[4721]: E0130 21:52:07.576584 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-log" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.576605 4721 
Jan 30 21:52:07 crc kubenswrapper[4721]: E0130 21:52:07.576629 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-metadata"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.576636 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-metadata"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.576829 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-metadata"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.576849 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" containerName="nova-metadata-log"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.578241 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.580673 4721 scope.go:117] "RemoveContainer" containerID="65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.581071 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Jan 30 21:52:07 crc kubenswrapper[4721]: E0130 21:52:07.581365 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade\": container with ID starting with 65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade not found: ID does not exist" containerID="65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.581410 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade"} err="failed to get container status \"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade\": rpc error: code = NotFound desc = could not find container \"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade\": container with ID starting with 65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade not found: ID does not exist"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.581442 4721 scope.go:117] "RemoveContainer" containerID="875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.581645 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 30 21:52:07 crc kubenswrapper[4721]: E0130 21:52:07.581829 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": container with ID starting with 875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4 not found: ID does not exist" containerID="875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.581858 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"} err="failed to get container status \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": rpc error: code = NotFound desc = could not find container \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": container with ID starting with 875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4 not found: ID does not exist"
containerID={"Type":"cri-o","ID":"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"} err="failed to get container status \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": rpc error: code = NotFound desc = could not find container \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": container with ID starting with 875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4 not found: ID does not exist" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.581880 4721 scope.go:117] "RemoveContainer" containerID="65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.583055 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade"} err="failed to get container status \"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade\": rpc error: code = NotFound desc = could not find container \"65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade\": container with ID starting with 65ad475a0f822af39af1276da67c8c6d2f7ef389fdf46a0cb62f2145a0653ade not found: ID does not exist" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.583078 4721 scope.go:117] "RemoveContainer" containerID="875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.583451 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4"} err="failed to get container status \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": rpc error: code = NotFound desc = could not find container \"875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4\": container with ID starting with 875c5151a933570fee9d44fdc1df9b11e8bfd0bca670044b8091928f96ae7ac4 not found: ID does not exist" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.586696 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.667244 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98blf\" (UniqueName: \"kubernetes.io/projected/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-kube-api-access-98blf\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.667353 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-config-data\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.667450 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-logs\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.667512 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.667589 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.770052 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98blf\" (UniqueName: \"kubernetes.io/projected/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-kube-api-access-98blf\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.770110 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-config-data\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.770162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-logs\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.770194 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.770233 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.771229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-logs\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.775099 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.778915 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0" Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.779002 4721 
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.793153 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98blf\" (UniqueName: \"kubernetes.io/projected/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-kube-api-access-98blf\") pod \"nova-metadata-0\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") " pod="openstack/nova-metadata-0"
Jan 30 21:52:07 crc kubenswrapper[4721]: I0130 21:52:07.898386 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 21:52:08 crc kubenswrapper[4721]: I0130 21:52:08.117505 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="246e98c4-6bbe-44ef-8d3d-6257c525dad1" path="/var/lib/kubelet/pods/246e98c4-6bbe-44ef-8d3d-6257c525dad1/volumes"
Jan 30 21:52:08 crc kubenswrapper[4721]: I0130 21:52:08.372953 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:52:08 crc kubenswrapper[4721]: I0130 21:52:08.515493 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54","Type":"ContainerStarted","Data":"ab69860a5951ccc639ce0fde15f849edaf04d7f2f2927670de67f0099db638cf"}
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.532408 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54","Type":"ContainerStarted","Data":"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"}
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.532861 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54","Type":"ContainerStarted","Data":"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"}
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.534533 4721 generic.go:334] "Generic (PLEG): container finished" podID="8c66f646-071a-42e8-b551-18c8fd4c6df4" containerID="055cbf36022f062ac3abda5376468704eef07db15d7f11d71db669ef8299a6d3" exitCode=0
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.534604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hs42m" event={"ID":"8c66f646-071a-42e8-b551-18c8fd4c6df4","Type":"ContainerDied","Data":"055cbf36022f062ac3abda5376468704eef07db15d7f11d71db669ef8299a6d3"}
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.560286 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.560264611 podStartE2EDuration="2.560264611s" podCreationTimestamp="2026-01-30 21:52:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:09.552842971 +0000 UTC m=+2118.344744217" watchObservedRunningTime="2026-01-30 21:52:09.560264611 +0000 UTC m=+2118.352165857"
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.668240 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.668558 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.859524 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.859589 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.896616 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 30 21:52:09 crc kubenswrapper[4721]: I0130 21:52:09.959572 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78cd565959-zrv56" Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.054783 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.058394 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-nb44g"] Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.058634 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerName="dnsmasq-dns" containerID="cri-o://24aae85cbc1753e135b85726e5d8fdb8df020bc9e84aeac9328333bd38c193ba" gracePeriod=10 Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.564704 4721 generic.go:334] "Generic (PLEG): container finished" podID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerID="24aae85cbc1753e135b85726e5d8fdb8df020bc9e84aeac9328333bd38c193ba" exitCode=0 Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.564830 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" event={"ID":"ecc0abab-30fa-418a-ac1f-f7f3d288c33d","Type":"ContainerDied","Data":"24aae85cbc1753e135b85726e5d8fdb8df020bc9e84aeac9328333bd38c193ba"} Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.631956 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.752530 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.217:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.752634 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.217:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.788830 4721 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.852166 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-swift-storage-0\") pod \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") "
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.852258 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-svc\") pod \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") "
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.852526 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-sb\") pod \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") "
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.852565 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj7q4\" (UniqueName: \"kubernetes.io/projected/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-kube-api-access-kj7q4\") pod \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") "
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.852600 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-nb\") pod \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") "
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.852701 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-config\") pod \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\" (UID: \"ecc0abab-30fa-418a-ac1f-f7f3d288c33d\") "
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.885621 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-kube-api-access-kj7q4" (OuterVolumeSpecName: "kube-api-access-kj7q4") pod "ecc0abab-30fa-418a-ac1f-f7f3d288c33d" (UID: "ecc0abab-30fa-418a-ac1f-f7f3d288c33d"). InnerVolumeSpecName "kube-api-access-kj7q4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.954715 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj7q4\" (UniqueName: \"kubernetes.io/projected/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-kube-api-access-kj7q4\") on node \"crc\" DevicePath \"\""
Jan 30 21:52:10 crc kubenswrapper[4721]: I0130 21:52:10.993394 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ecc0abab-30fa-418a-ac1f-f7f3d288c33d" (UID: "ecc0abab-30fa-418a-ac1f-f7f3d288c33d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.001788 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ecc0abab-30fa-418a-ac1f-f7f3d288c33d" (UID: "ecc0abab-30fa-418a-ac1f-f7f3d288c33d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.008032 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ecc0abab-30fa-418a-ac1f-f7f3d288c33d" (UID: "ecc0abab-30fa-418a-ac1f-f7f3d288c33d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.016007 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-config" (OuterVolumeSpecName: "config") pod "ecc0abab-30fa-418a-ac1f-f7f3d288c33d" (UID: "ecc0abab-30fa-418a-ac1f-f7f3d288c33d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.016797 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ecc0abab-30fa-418a-ac1f-f7f3d288c33d" (UID: "ecc0abab-30fa-418a-ac1f-f7f3d288c33d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.057239 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.057284 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.057318 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.057330 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.057342 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecc0abab-30fa-418a-ac1f-f7f3d288c33d-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.120494 4721 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.158335 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmgmd\" (UniqueName: \"kubernetes.io/projected/8c66f646-071a-42e8-b551-18c8fd4c6df4-kube-api-access-zmgmd\") pod \"8c66f646-071a-42e8-b551-18c8fd4c6df4\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") "
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.158870 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-config-data\") pod \"8c66f646-071a-42e8-b551-18c8fd4c6df4\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") "
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.158991 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-combined-ca-bundle\") pod \"8c66f646-071a-42e8-b551-18c8fd4c6df4\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") "
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.159231 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-scripts\") pod \"8c66f646-071a-42e8-b551-18c8fd4c6df4\" (UID: \"8c66f646-071a-42e8-b551-18c8fd4c6df4\") "
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.163833 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-scripts" (OuterVolumeSpecName: "scripts") pod "8c66f646-071a-42e8-b551-18c8fd4c6df4" (UID: "8c66f646-071a-42e8-b551-18c8fd4c6df4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.168476 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c66f646-071a-42e8-b551-18c8fd4c6df4-kube-api-access-zmgmd" (OuterVolumeSpecName: "kube-api-access-zmgmd") pod "8c66f646-071a-42e8-b551-18c8fd4c6df4" (UID: "8c66f646-071a-42e8-b551-18c8fd4c6df4"). InnerVolumeSpecName "kube-api-access-zmgmd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.216032 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c66f646-071a-42e8-b551-18c8fd4c6df4" (UID: "8c66f646-071a-42e8-b551-18c8fd4c6df4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.218453 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-config-data" (OuterVolumeSpecName: "config-data") pod "8c66f646-071a-42e8-b551-18c8fd4c6df4" (UID: "8c66f646-071a-42e8-b551-18c8fd4c6df4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.261230 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.261265 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.261276 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c66f646-071a-42e8-b551-18c8fd4c6df4-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.261288 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmgmd\" (UniqueName: \"kubernetes.io/projected/8c66f646-071a-42e8-b551-18c8fd4c6df4-kube-api-access-zmgmd\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.610895 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hs42m" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.610934 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hs42m" event={"ID":"8c66f646-071a-42e8-b551-18c8fd4c6df4","Type":"ContainerDied","Data":"913f0861eed8d3a19e549d7ae53aa1ea598c74eb2002cf75105cfa62ac7e72b5"} Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.610983 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="913f0861eed8d3a19e549d7ae53aa1ea598c74eb2002cf75105cfa62ac7e72b5" Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.613425 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-nb44g" event={"ID":"ecc0abab-30fa-418a-ac1f-f7f3d288c33d","Type":"ContainerDied","Data":"556d2b9d583c114eee7b8bfb1c548a5a01958772322a870e483c9955f1ff9308"} Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.613437 4721 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.613480 4721 scope.go:117] "RemoveContainer" containerID="24aae85cbc1753e135b85726e5d8fdb8df020bc9e84aeac9328333bd38c193ba"
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.727814 4721 scope.go:117] "RemoveContainer" containerID="02b4d59804b8d84741d4e19ec6c32d0330f1bbe1d63a3b6ded1fcbaa74a60764"
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.735822 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-nb44g"]
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.753359 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-nb44g"]
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.775714 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.775970 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-log" containerID="cri-o://32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3" gracePeriod=30
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.777087 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-api" containerID="cri-o://103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307" gracePeriod=30
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.791156 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.802400 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.802844 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-log" containerID="cri-o://78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945" gracePeriod=30
Jan 30 21:52:11 crc kubenswrapper[4721]: I0130 21:52:11.803391 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-metadata" containerID="cri-o://7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011" gracePeriod=30
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.133901 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" path="/var/lib/kubelet/pods/ecc0abab-30fa-418a-ac1f-f7f3d288c33d/volumes"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.437731 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.490720 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-nova-metadata-tls-certs\") pod \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") "
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.490833 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-config-data\") pod \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") "
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.490929 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-combined-ca-bundle\") pod \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") "
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.490964 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98blf\" (UniqueName: \"kubernetes.io/projected/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-kube-api-access-98blf\") pod \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") "
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.491146 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-logs\") pod \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\" (UID: \"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54\") "
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.493323 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-logs" (OuterVolumeSpecName: "logs") pod "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" (UID: "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.506489 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-kube-api-access-98blf" (OuterVolumeSpecName: "kube-api-access-98blf") pod "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" (UID: "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54"). InnerVolumeSpecName "kube-api-access-98blf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.534624 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-config-data" (OuterVolumeSpecName: "config-data") pod "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" (UID: "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.543534 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" (UID: "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.572805 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" (UID: "e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.593214 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.593250 4721 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.593262 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.593272 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.593281 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98blf\" (UniqueName: \"kubernetes.io/projected/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54-kube-api-access-98blf\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.627641 4721 generic.go:334] "Generic (PLEG): container finished" podID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerID="32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3" exitCode=143 Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.627704 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"97762a1c-ffe6-48d6-ba91-57dabd31b5b8","Type":"ContainerDied","Data":"32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3"} Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.631723 4721 generic.go:334] "Generic (PLEG): container finished" podID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerID="7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011" exitCode=0 Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.631760 4721 generic.go:334] "Generic (PLEG): container finished" podID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerID="78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945" exitCode=143 Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.632165 4721 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.632161 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54","Type":"ContainerDied","Data":"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"}
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.632586 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54","Type":"ContainerDied","Data":"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"}
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.632601 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54","Type":"ContainerDied","Data":"ab69860a5951ccc639ce0fde15f849edaf04d7f2f2927670de67f0099db638cf"}
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.632618 4721 scope.go:117] "RemoveContainer" containerID="7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.717254 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.723315 4721 scope.go:117] "RemoveContainer" containerID="78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.731436 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.747658 4721 scope.go:117] "RemoveContainer" containerID="7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"
Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.748504 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011\": container with ID starting with 7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011 not found: ID does not exist" containerID="7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.748560 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"} err="failed to get container status \"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011\": rpc error: code = NotFound desc = could not find container \"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011\": container with ID starting with 7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011 not found: ID does not exist"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.748589 4721 scope.go:117] "RemoveContainer" containerID="78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"
Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.750323 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": container with ID starting with 78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945 not found: ID does not exist" containerID="78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"
Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.750372 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"} err="failed to get container status \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": rpc error: code = NotFound desc = could not find container \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": container with ID starting with 78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945 not found: ID does not exist"
21:52:12.750372 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"} err="failed to get container status \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": rpc error: code = NotFound desc = could not find container \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": container with ID starting with 78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945 not found: ID does not exist" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.750398 4721 scope.go:117] "RemoveContainer" containerID="7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.751225 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011"} err="failed to get container status \"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011\": rpc error: code = NotFound desc = could not find container \"7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011\": container with ID starting with 7860eb007b7ceddd9c6fda6c75020ffff7a6c48477bc7e24418873ee27765011 not found: ID does not exist" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.751254 4721 scope.go:117] "RemoveContainer" containerID="78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.751870 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945"} err="failed to get container status \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": rpc error: code = NotFound desc = could not find container \"78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945\": container with ID starting with 78bf6c52c497545a8f9726fc076428069ac10da5c62c3d465aedec39ef6fe945 not found: ID does not exist" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.752829 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.753312 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c66f646-071a-42e8-b551-18c8fd4c6df4" containerName="nova-manage" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753330 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c66f646-071a-42e8-b551-18c8fd4c6df4" containerName="nova-manage" Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.753354 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-metadata" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753363 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-metadata" Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.753376 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerName="init" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753381 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerName="init" Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.753393 4721 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerName="dnsmasq-dns" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753399 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerName="dnsmasq-dns" Jan 30 21:52:12 crc kubenswrapper[4721]: E0130 21:52:12.753411 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-log" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753417 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-log" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753633 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c66f646-071a-42e8-b551-18c8fd4c6df4" containerName="nova-manage" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753649 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc0abab-30fa-418a-ac1f-f7f3d288c33d" containerName="dnsmasq-dns" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753663 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-metadata" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.753671 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" containerName="nova-metadata-log" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.754787 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.762919 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.763210 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.785968 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.800127 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d78ef6a-c771-40a8-b988-042f40e7d0e2-logs\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.800189 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-config-data\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.800263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.800432 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.800586 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clbzn\" (UniqueName: \"kubernetes.io/projected/6d78ef6a-c771-40a8-b988-042f40e7d0e2-kube-api-access-clbzn\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.902741 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.902877 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.902907 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clbzn\" (UniqueName: \"kubernetes.io/projected/6d78ef6a-c771-40a8-b988-042f40e7d0e2-kube-api-access-clbzn\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.902966 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d78ef6a-c771-40a8-b988-042f40e7d0e2-logs\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.902992 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-config-data\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.904528 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d78ef6a-c771-40a8-b988-042f40e7d0e2-logs\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.907881 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.907900 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-config-data\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.912528 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:12 crc kubenswrapper[4721]: I0130 21:52:12.929860 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clbzn\" (UniqueName: \"kubernetes.io/projected/6d78ef6a-c771-40a8-b988-042f40e7d0e2-kube-api-access-clbzn\") pod \"nova-metadata-0\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " pod="openstack/nova-metadata-0" Jan 30 21:52:13 crc kubenswrapper[4721]: I0130 21:52:13.074481 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:52:13 crc kubenswrapper[4721]: I0130 21:52:13.551064 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:52:13 crc kubenswrapper[4721]: W0130 21:52:13.553577 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d78ef6a_c771_40a8_b988_042f40e7d0e2.slice/crio-f5690b9354fdbfcb8e5ae82d8ee2d509333c16fa379f6652caf37ffc9ac80571 WatchSource:0}: Error finding container f5690b9354fdbfcb8e5ae82d8ee2d509333c16fa379f6652caf37ffc9ac80571: Status 404 returned error can't find the container with id f5690b9354fdbfcb8e5ae82d8ee2d509333c16fa379f6652caf37ffc9ac80571 Jan 30 21:52:13 crc kubenswrapper[4721]: I0130 21:52:13.648031 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" containerName="nova-scheduler-scheduler" containerID="cri-o://996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64" gracePeriod=30 Jan 30 21:52:13 crc kubenswrapper[4721]: I0130 21:52:13.648895 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d78ef6a-c771-40a8-b988-042f40e7d0e2","Type":"ContainerStarted","Data":"f5690b9354fdbfcb8e5ae82d8ee2d509333c16fa379f6652caf37ffc9ac80571"} Jan 30 21:52:14 crc kubenswrapper[4721]: I0130 21:52:14.113157 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54" path="/var/lib/kubelet/pods/e0922f0f-5d2c-4b07-9b2f-b9d93bbc8b54/volumes" Jan 30 21:52:14 crc kubenswrapper[4721]: I0130 21:52:14.658708 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d78ef6a-c771-40a8-b988-042f40e7d0e2","Type":"ContainerStarted","Data":"61f14b05955e6aadc95309a876881121bb5f5d42039761e660b82bb840e4f843"} Jan 30 21:52:14 crc kubenswrapper[4721]: I0130 21:52:14.659040 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d78ef6a-c771-40a8-b988-042f40e7d0e2","Type":"ContainerStarted","Data":"fe5d56a0a24da9a13580bd6abdb1c4d1c4e9407745375745a50708110fb24896"} Jan 30 21:52:14 crc kubenswrapper[4721]: I0130 21:52:14.660333 4721 generic.go:334] "Generic (PLEG): container finished" podID="f5db7a32-67d1-48b4-802f-0711a9e32eb2" containerID="e618f6d2c31f7c68ad4e3e1479d2259b6e7ed9c9883e137e3960be8c8e935b4d" exitCode=0 Jan 30 21:52:14 crc kubenswrapper[4721]: I0130 21:52:14.660383 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-56v9p" 
event={"ID":"f5db7a32-67d1-48b4-802f-0711a9e32eb2","Type":"ContainerDied","Data":"e618f6d2c31f7c68ad4e3e1479d2259b6e7ed9c9883e137e3960be8c8e935b4d"} Jan 30 21:52:14 crc kubenswrapper[4721]: I0130 21:52:14.678992 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.678969612 podStartE2EDuration="2.678969612s" podCreationTimestamp="2026-01-30 21:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:14.675277497 +0000 UTC m=+2123.467178773" watchObservedRunningTime="2026-01-30 21:52:14.678969612 +0000 UTC m=+2123.470870858" Jan 30 21:52:14 crc kubenswrapper[4721]: E0130 21:52:14.861212 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 21:52:14 crc kubenswrapper[4721]: E0130 21:52:14.862926 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 21:52:14 crc kubenswrapper[4721]: E0130 21:52:14.864867 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 21:52:14 crc kubenswrapper[4721]: E0130 21:52:14.864928 4721 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" containerName="nova-scheduler-scheduler" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.155561 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-56v9p" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.279779 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-scripts\") pod \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.280121 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-config-data\") pod \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.280174 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlwqq\" (UniqueName: \"kubernetes.io/projected/f5db7a32-67d1-48b4-802f-0711a9e32eb2-kube-api-access-tlwqq\") pod \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.280270 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-combined-ca-bundle\") pod \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\" (UID: \"f5db7a32-67d1-48b4-802f-0711a9e32eb2\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.287444 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5db7a32-67d1-48b4-802f-0711a9e32eb2-kube-api-access-tlwqq" (OuterVolumeSpecName: "kube-api-access-tlwqq") pod "f5db7a32-67d1-48b4-802f-0711a9e32eb2" (UID: "f5db7a32-67d1-48b4-802f-0711a9e32eb2"). InnerVolumeSpecName "kube-api-access-tlwqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.288369 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-scripts" (OuterVolumeSpecName: "scripts") pod "f5db7a32-67d1-48b4-802f-0711a9e32eb2" (UID: "f5db7a32-67d1-48b4-802f-0711a9e32eb2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.347981 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-config-data" (OuterVolumeSpecName: "config-data") pod "f5db7a32-67d1-48b4-802f-0711a9e32eb2" (UID: "f5db7a32-67d1-48b4-802f-0711a9e32eb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.352654 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5db7a32-67d1-48b4-802f-0711a9e32eb2" (UID: "f5db7a32-67d1-48b4-802f-0711a9e32eb2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.382877 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.382914 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.382923 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5db7a32-67d1-48b4-802f-0711a9e32eb2-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.382933 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlwqq\" (UniqueName: \"kubernetes.io/projected/f5db7a32-67d1-48b4-802f-0711a9e32eb2-kube-api-access-tlwqq\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.658439 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fxxh4"] Jan 30 21:52:16 crc kubenswrapper[4721]: E0130 21:52:16.658933 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5db7a32-67d1-48b4-802f-0711a9e32eb2" containerName="nova-cell1-conductor-db-sync" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.658945 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5db7a32-67d1-48b4-802f-0711a9e32eb2" containerName="nova-cell1-conductor-db-sync" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.659181 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5db7a32-67d1-48b4-802f-0711a9e32eb2" containerName="nova-cell1-conductor-db-sync" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.660707 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.668138 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fxxh4"] Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.698220 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-utilities\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.698321 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz9h7\" (UniqueName: \"kubernetes.io/projected/7290b128-3568-44a1-ae57-7b89f3d9caeb-kube-api-access-jz9h7\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.698403 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-catalog-content\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.739059 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.750660 4721 generic.go:334] "Generic (PLEG): container finished" podID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerID="103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307" exitCode=0 Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.750731 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"97762a1c-ffe6-48d6-ba91-57dabd31b5b8","Type":"ContainerDied","Data":"103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307"} Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.750788 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"97762a1c-ffe6-48d6-ba91-57dabd31b5b8","Type":"ContainerDied","Data":"edc10525b1f93d8ae3843833b1b3deaf57cdaaccc552ae2875ffe15db78f1241"} Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.750807 4721 scope.go:117] "RemoveContainer" containerID="103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.753563 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-56v9p" event={"ID":"f5db7a32-67d1-48b4-802f-0711a9e32eb2","Type":"ContainerDied","Data":"cd01054e93cb5abf96002acfaa52554db0784e0d7bef0f910029040ba283ae35"} Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.753585 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd01054e93cb5abf96002acfaa52554db0784e0d7bef0f910029040ba283ae35" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.753637 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-56v9p" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.799925 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-logs\") pod \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.800486 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snw6r\" (UniqueName: \"kubernetes.io/projected/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-kube-api-access-snw6r\") pod \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.800621 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-config-data\") pod \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.800705 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-combined-ca-bundle\") pod \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\" (UID: \"97762a1c-ffe6-48d6-ba91-57dabd31b5b8\") " Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.800786 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-logs" (OuterVolumeSpecName: "logs") pod "97762a1c-ffe6-48d6-ba91-57dabd31b5b8" (UID: "97762a1c-ffe6-48d6-ba91-57dabd31b5b8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.808199 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-utilities\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.808353 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz9h7\" (UniqueName: \"kubernetes.io/projected/7290b128-3568-44a1-ae57-7b89f3d9caeb-kube-api-access-jz9h7\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.808563 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-catalog-content\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.808863 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.809530 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-catalog-content\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.809531 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-utilities\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.812334 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-kube-api-access-snw6r" (OuterVolumeSpecName: "kube-api-access-snw6r") pod "97762a1c-ffe6-48d6-ba91-57dabd31b5b8" (UID: "97762a1c-ffe6-48d6-ba91-57dabd31b5b8"). InnerVolumeSpecName "kube-api-access-snw6r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.817102 4721 scope.go:117] "RemoveContainer" containerID="32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.876843 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 21:52:16 crc kubenswrapper[4721]: E0130 21:52:16.877276 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-log" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.877308 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-log" Jan 30 21:52:16 crc kubenswrapper[4721]: E0130 21:52:16.877320 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-api" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.877326 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-api" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.877540 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-api" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.877560 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" containerName="nova-api-log" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.881008 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.887080 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz9h7\" (UniqueName: \"kubernetes.io/projected/7290b128-3568-44a1-ae57-7b89f3d9caeb-kube-api-access-jz9h7\") pod \"community-operators-fxxh4\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.887185 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.892179 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97762a1c-ffe6-48d6-ba91-57dabd31b5b8" (UID: "97762a1c-ffe6-48d6-ba91-57dabd31b5b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.899023 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-config-data" (OuterVolumeSpecName: "config-data") pod "97762a1c-ffe6-48d6-ba91-57dabd31b5b8" (UID: "97762a1c-ffe6-48d6-ba91-57dabd31b5b8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.900065 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.910654 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snw6r\" (UniqueName: \"kubernetes.io/projected/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-kube-api-access-snw6r\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.910696 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.910731 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97762a1c-ffe6-48d6-ba91-57dabd31b5b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.939998 4721 scope.go:117] "RemoveContainer" containerID="103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307" Jan 30 21:52:16 crc kubenswrapper[4721]: E0130 21:52:16.940484 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307\": container with ID starting with 103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307 not found: ID does not exist" containerID="103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.940513 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307"} err="failed to get container status \"103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307\": rpc error: code = NotFound desc = could not find container \"103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307\": container with ID starting with 103c9fc2e965f32de3404f9a14a7a05e2926e0bd8eb025477f54613c47a82307 not found: ID does not exist" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.940534 4721 scope.go:117] "RemoveContainer" containerID="32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3" Jan 30 21:52:16 crc kubenswrapper[4721]: E0130 21:52:16.941343 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3\": container with ID starting with 32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3 not found: ID does not exist" containerID="32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3" Jan 30 21:52:16 crc kubenswrapper[4721]: I0130 21:52:16.941407 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3"} err="failed to get container status \"32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3\": rpc error: code = NotFound desc = could not find container \"32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3\": container with ID starting with 32cf4cc738d6a4f59363a4af21cf3748848b3854ac859e8a40d3c20289e9c0d3 not found: ID does not exist" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 
21:52:17.012142 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca8e7a1-e433-4c9a-9532-f695fedd853e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.012498 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7kd7\" (UniqueName: \"kubernetes.io/projected/0ca8e7a1-e433-4c9a-9532-f695fedd853e-kube-api-access-h7kd7\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.012716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca8e7a1-e433-4c9a-9532-f695fedd853e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.046935 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.115024 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7kd7\" (UniqueName: \"kubernetes.io/projected/0ca8e7a1-e433-4c9a-9532-f695fedd853e-kube-api-access-h7kd7\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.115363 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca8e7a1-e433-4c9a-9532-f695fedd853e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.115424 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca8e7a1-e433-4c9a-9532-f695fedd853e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.121250 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ca8e7a1-e433-4c9a-9532-f695fedd853e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.121820 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ca8e7a1-e433-4c9a-9532-f695fedd853e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.141962 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7kd7\" (UniqueName: \"kubernetes.io/projected/0ca8e7a1-e433-4c9a-9532-f695fedd853e-kube-api-access-h7kd7\") pod \"nova-cell1-conductor-0\" (UID: 
\"0ca8e7a1-e433-4c9a-9532-f695fedd853e\") " pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:17 crc kubenswrapper[4721]: I0130 21:52:17.216547 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.544029 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fxxh4"] Jan 30 21:52:18 crc kubenswrapper[4721]: W0130 21:52:17.549841 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7290b128_3568_44a1_ae57_7b89f3d9caeb.slice/crio-4c8f0c90ea4f4e7db04ad1e348d4b7b6715c01482bc2e9732838e7529a70a8a6 WatchSource:0}: Error finding container 4c8f0c90ea4f4e7db04ad1e348d4b7b6715c01482bc2e9732838e7529a70a8a6: Status 404 returned error can't find the container with id 4c8f0c90ea4f4e7db04ad1e348d4b7b6715c01482bc2e9732838e7529a70a8a6 Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.765515 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.768735 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerStarted","Data":"4c8f0c90ea4f4e7db04ad1e348d4b7b6715c01482bc2e9732838e7529a70a8a6"} Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.828042 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.876904 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.900768 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.903368 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.905566 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:17.912922 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.038845 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-config-data\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.038918 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/920aadff-0704-437f-89e7-bae6b121727e-logs\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.038966 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.039010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhlrf\" (UniqueName: \"kubernetes.io/projected/920aadff-0704-437f-89e7-bae6b121727e-kube-api-access-dhlrf\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.075398 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.076686 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.104335 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97762a1c-ffe6-48d6-ba91-57dabd31b5b8" path="/var/lib/kubelet/pods/97762a1c-ffe6-48d6-ba91-57dabd31b5b8/volumes" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.141339 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-config-data\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.141499 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/920aadff-0704-437f-89e7-bae6b121727e-logs\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.141559 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.141605 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-dhlrf\" (UniqueName: \"kubernetes.io/projected/920aadff-0704-437f-89e7-bae6b121727e-kube-api-access-dhlrf\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.142996 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/920aadff-0704-437f-89e7-bae6b121727e-logs\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.149967 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-config-data\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.150520 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.158492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhlrf\" (UniqueName: \"kubernetes.io/projected/920aadff-0704-437f-89e7-bae6b121727e-kube-api-access-dhlrf\") pod \"nova-api-0\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.338780 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.780808 4721 generic.go:334] "Generic (PLEG): container finished" podID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerID="81aab703c587901332e14653191d70073ccd29a486dd8d208119a89e586417fa" exitCode=0 Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.780909 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerDied","Data":"81aab703c587901332e14653191d70073ccd29a486dd8d208119a89e586417fa"} Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.784635 4721 generic.go:334] "Generic (PLEG): container finished" podID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" containerID="996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64" exitCode=0 Jan 30 21:52:18 crc kubenswrapper[4721]: I0130 21:52:18.784737 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ac830b68-ee1f-4f4f-9906-8d26f9fd0534","Type":"ContainerDied","Data":"996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64"} Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.395864 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.429513 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.466610 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.467443 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmlhg\" (UniqueName: \"kubernetes.io/projected/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-kube-api-access-cmlhg\") pod \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.467563 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-combined-ca-bundle\") pod \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.467661 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-config-data\") pod \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\" (UID: \"ac830b68-ee1f-4f4f-9906-8d26f9fd0534\") " Jan 30 21:52:19 crc kubenswrapper[4721]: W0130 21:52:19.468474 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ca8e7a1_e433_4c9a_9532_f695fedd853e.slice/crio-f2a00c0aa6cb3262c8f16c11291f8fe3dbc8785cce8c0001189f91ea11ea4f45 WatchSource:0}: Error finding container f2a00c0aa6cb3262c8f16c11291f8fe3dbc8785cce8c0001189f91ea11ea4f45: Status 404 returned error can't find the container with id f2a00c0aa6cb3262c8f16c11291f8fe3dbc8785cce8c0001189f91ea11ea4f45 Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.477211 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-kube-api-access-cmlhg" (OuterVolumeSpecName: "kube-api-access-cmlhg") pod "ac830b68-ee1f-4f4f-9906-8d26f9fd0534" (UID: "ac830b68-ee1f-4f4f-9906-8d26f9fd0534"). InnerVolumeSpecName "kube-api-access-cmlhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:19 crc kubenswrapper[4721]: W0130 21:52:19.478194 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod920aadff_0704_437f_89e7_bae6b121727e.slice/crio-e3c9c1883a96f1fe7becc2c97cb9dbe630ac9c5a9c6e3d35d447262c6c8194c7 WatchSource:0}: Error finding container e3c9c1883a96f1fe7becc2c97cb9dbe630ac9c5a9c6e3d35d447262c6c8194c7: Status 404 returned error can't find the container with id e3c9c1883a96f1fe7becc2c97cb9dbe630ac9c5a9c6e3d35d447262c6c8194c7 Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.503593 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-config-data" (OuterVolumeSpecName: "config-data") pod "ac830b68-ee1f-4f4f-9906-8d26f9fd0534" (UID: "ac830b68-ee1f-4f4f-9906-8d26f9fd0534"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.508977 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac830b68-ee1f-4f4f-9906-8d26f9fd0534" (UID: "ac830b68-ee1f-4f4f-9906-8d26f9fd0534"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.570957 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmlhg\" (UniqueName: \"kubernetes.io/projected/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-kube-api-access-cmlhg\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.571314 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.571328 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac830b68-ee1f-4f4f-9906-8d26f9fd0534-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.806468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"920aadff-0704-437f-89e7-bae6b121727e","Type":"ContainerStarted","Data":"e3c9c1883a96f1fe7becc2c97cb9dbe630ac9c5a9c6e3d35d447262c6c8194c7"} Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.810765 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ac830b68-ee1f-4f4f-9906-8d26f9fd0534","Type":"ContainerDied","Data":"4b1a8fa870dc0e131f59ff9395ad1b8da2749f7a5aeaf45d9a245bf7293dd8e5"} Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.810832 4721 scope.go:117] "RemoveContainer" containerID="996d05cf53bfb4489b5236c507388b3ab5a10668c46c2b1139f5c2f9ac335d64" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.811232 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.822804 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"0ca8e7a1-e433-4c9a-9532-f695fedd853e","Type":"ContainerStarted","Data":"f2a00c0aa6cb3262c8f16c11291f8fe3dbc8785cce8c0001189f91ea11ea4f45"} Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.847175 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.857283 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.874284 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:52:19 crc kubenswrapper[4721]: E0130 21:52:19.875447 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" containerName="nova-scheduler-scheduler" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.875501 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" containerName="nova-scheduler-scheduler" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.875851 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" containerName="nova-scheduler-scheduler" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.877043 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.879587 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.884163 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.980134 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.980267 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpl5n\" (UniqueName: \"kubernetes.io/projected/d870c1ca-99c0-4f06-93d1-299d2827d0fe-kube-api-access-jpl5n\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:19 crc kubenswrapper[4721]: I0130 21:52:19.980995 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-config-data\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.082790 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpl5n\" (UniqueName: \"kubernetes.io/projected/d870c1ca-99c0-4f06-93d1-299d2827d0fe-kube-api-access-jpl5n\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.083056 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-config-data\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.083122 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.089125 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.089865 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-config-data\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.100883 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpl5n\" (UniqueName: \"kubernetes.io/projected/d870c1ca-99c0-4f06-93d1-299d2827d0fe-kube-api-access-jpl5n\") pod \"nova-scheduler-0\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.107467 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac830b68-ee1f-4f4f-9906-8d26f9fd0534" path="/var/lib/kubelet/pods/ac830b68-ee1f-4f4f-9906-8d26f9fd0534/volumes" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.210529 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.730647 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:52:20 crc kubenswrapper[4721]: W0130 21:52:20.737856 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd870c1ca_99c0_4f06_93d1_299d2827d0fe.slice/crio-0df9ff92ed099784ec930d736ce018d6cfd53dd1fca37bbfcd27385d8cc87eb6 WatchSource:0}: Error finding container 0df9ff92ed099784ec930d736ce018d6cfd53dd1fca37bbfcd27385d8cc87eb6: Status 404 returned error can't find the container with id 0df9ff92ed099784ec930d736ce018d6cfd53dd1fca37bbfcd27385d8cc87eb6 Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.839881 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"920aadff-0704-437f-89e7-bae6b121727e","Type":"ContainerStarted","Data":"3f9c5efeb97e0d91a75e65b03c86420c097cb97903024a2137765ce6826e57cf"} Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.839929 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"920aadff-0704-437f-89e7-bae6b121727e","Type":"ContainerStarted","Data":"de86b7ea147032730fe4558be12d8d2a71cece31f8e4b75a7ec6c87bb9982aee"} Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.843739 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"0ca8e7a1-e433-4c9a-9532-f695fedd853e","Type":"ContainerStarted","Data":"144dd004d51d76b7c9239fd2f91b995badea15b66b6bf9e30f727eb92e478915"} Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.844257 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.848181 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerStarted","Data":"729aed228ccbba9f8c91979120ee8d835b55db9326d2728f9f503b42885b1a7f"} Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.851820 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d870c1ca-99c0-4f06-93d1-299d2827d0fe","Type":"ContainerStarted","Data":"0df9ff92ed099784ec930d736ce018d6cfd53dd1fca37bbfcd27385d8cc87eb6"} Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.876699 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.876679991 podStartE2EDuration="3.876679991s" podCreationTimestamp="2026-01-30 21:52:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:20.862374047 +0000 UTC m=+2129.654275293" watchObservedRunningTime="2026-01-30 21:52:20.876679991 +0000 UTC m=+2129.668581237" Jan 30 21:52:20 crc kubenswrapper[4721]: I0130 21:52:20.890745 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=4.8907235159999995 podStartE2EDuration="4.890723516s" podCreationTimestamp="2026-01-30 21:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:20.876392252 +0000 UTC m=+2129.668293518" watchObservedRunningTime="2026-01-30 21:52:20.890723516 +0000 
UTC m=+2129.682624762" Jan 30 21:52:21 crc kubenswrapper[4721]: I0130 21:52:21.865491 4721 generic.go:334] "Generic (PLEG): container finished" podID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerID="729aed228ccbba9f8c91979120ee8d835b55db9326d2728f9f503b42885b1a7f" exitCode=0 Jan 30 21:52:21 crc kubenswrapper[4721]: I0130 21:52:21.865613 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerDied","Data":"729aed228ccbba9f8c91979120ee8d835b55db9326d2728f9f503b42885b1a7f"} Jan 30 21:52:21 crc kubenswrapper[4721]: I0130 21:52:21.869478 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d870c1ca-99c0-4f06-93d1-299d2827d0fe","Type":"ContainerStarted","Data":"82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b"} Jan 30 21:52:21 crc kubenswrapper[4721]: I0130 21:52:21.922904 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.9228784230000002 podStartE2EDuration="2.922878423s" podCreationTimestamp="2026-01-30 21:52:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:21.905456332 +0000 UTC m=+2130.697357628" watchObservedRunningTime="2026-01-30 21:52:21.922878423 +0000 UTC m=+2130.714779709" Jan 30 21:52:22 crc kubenswrapper[4721]: I0130 21:52:22.513354 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 30 21:52:22 crc kubenswrapper[4721]: I0130 21:52:22.882065 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerStarted","Data":"5300c6f90a0feee235708903cf9f9245d641faed35abf244962f941059a4771d"} Jan 30 21:52:22 crc kubenswrapper[4721]: I0130 21:52:22.924195 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fxxh4" podStartSLOduration=3.286034759 podStartE2EDuration="6.924164532s" podCreationTimestamp="2026-01-30 21:52:16 +0000 UTC" firstStartedPulling="2026-01-30 21:52:18.783695577 +0000 UTC m=+2127.575596823" lastFinishedPulling="2026-01-30 21:52:22.42182535 +0000 UTC m=+2131.213726596" observedRunningTime="2026-01-30 21:52:22.906484154 +0000 UTC m=+2131.698385420" watchObservedRunningTime="2026-01-30 21:52:22.924164532 +0000 UTC m=+2131.716065798" Jan 30 21:52:23 crc kubenswrapper[4721]: I0130 21:52:23.078439 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 21:52:23 crc kubenswrapper[4721]: I0130 21:52:23.078492 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 21:52:24 crc kubenswrapper[4721]: I0130 21:52:24.087453 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:24 crc kubenswrapper[4721]: I0130 21:52:24.087477 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-metadata" 
probeResult="failure" output="Get \"https://10.217.0.224:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:25 crc kubenswrapper[4721]: I0130 21:52:25.211804 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 30 21:52:26 crc kubenswrapper[4721]: I0130 21:52:26.461425 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 21:52:26 crc kubenswrapper[4721]: I0130 21:52:26.462212 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b357dd79-4a4f-48c7-ba6f-058ca30785f5" containerName="kube-state-metrics" containerID="cri-o://4b5e1b9a82e3de2004b2855460b59a8a6d77f77c6ae46c4d49d8b5c214fc340c" gracePeriod=30 Jan 30 21:52:26 crc kubenswrapper[4721]: I0130 21:52:26.923688 4721 generic.go:334] "Generic (PLEG): container finished" podID="b357dd79-4a4f-48c7-ba6f-058ca30785f5" containerID="4b5e1b9a82e3de2004b2855460b59a8a6d77f77c6ae46c4d49d8b5c214fc340c" exitCode=2 Jan 30 21:52:26 crc kubenswrapper[4721]: I0130 21:52:26.923754 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b357dd79-4a4f-48c7-ba6f-058ca30785f5","Type":"ContainerDied","Data":"4b5e1b9a82e3de2004b2855460b59a8a6d77f77c6ae46c4d49d8b5c214fc340c"} Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.047779 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.047842 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.150087 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.237360 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j4n8\" (UniqueName: \"kubernetes.io/projected/b357dd79-4a4f-48c7-ba6f-058ca30785f5-kube-api-access-2j4n8\") pod \"b357dd79-4a4f-48c7-ba6f-058ca30785f5\" (UID: \"b357dd79-4a4f-48c7-ba6f-058ca30785f5\") " Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.253884 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b357dd79-4a4f-48c7-ba6f-058ca30785f5-kube-api-access-2j4n8" (OuterVolumeSpecName: "kube-api-access-2j4n8") pod "b357dd79-4a4f-48c7-ba6f-058ca30785f5" (UID: "b357dd79-4a4f-48c7-ba6f-058ca30785f5"). InnerVolumeSpecName "kube-api-access-2j4n8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.260897 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.358664 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j4n8\" (UniqueName: \"kubernetes.io/projected/b357dd79-4a4f-48c7-ba6f-058ca30785f5-kube-api-access-2j4n8\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.935767 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b357dd79-4a4f-48c7-ba6f-058ca30785f5","Type":"ContainerDied","Data":"e8cc390e27cea4f710306d3e27144383ed96863136a8ccc67eb46383356fb152"} Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.935834 4721 scope.go:117] "RemoveContainer" containerID="4b5e1b9a82e3de2004b2855460b59a8a6d77f77c6ae46c4d49d8b5c214fc340c" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.936602 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.977533 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.986136 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.994874 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 21:52:27 crc kubenswrapper[4721]: E0130 21:52:27.995496 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b357dd79-4a4f-48c7-ba6f-058ca30785f5" containerName="kube-state-metrics" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.995518 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b357dd79-4a4f-48c7-ba6f-058ca30785f5" containerName="kube-state-metrics" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.995742 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b357dd79-4a4f-48c7-ba6f-058ca30785f5" containerName="kube-state-metrics" Jan 30 21:52:27 crc kubenswrapper[4721]: I0130 21:52:27.996599 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.002422 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.002633 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.036121 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.074810 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8kjr\" (UniqueName: \"kubernetes.io/projected/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-api-access-z8kjr\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.074869 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.074886 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.074937 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.103077 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-fxxh4" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="registry-server" probeResult="failure" output=< Jan 30 21:52:28 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:52:28 crc kubenswrapper[4721]: > Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.103750 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b357dd79-4a4f-48c7-ba6f-058ca30785f5" path="/var/lib/kubelet/pods/b357dd79-4a4f-48c7-ba6f-058ca30785f5/volumes" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.176664 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8kjr\" (UniqueName: \"kubernetes.io/projected/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-api-access-z8kjr\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.176744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-state-metrics-tls-certs\") pod 
\"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.176774 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.176836 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.182407 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.182954 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.183230 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e82608-6b61-42c4-b4fc-6f1fe545e119-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.192274 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8kjr\" (UniqueName: \"kubernetes.io/projected/b8e82608-6b61-42c4-b4fc-6f1fe545e119-kube-api-access-z8kjr\") pod \"kube-state-metrics-0\" (UID: \"b8e82608-6b61-42c4-b4fc-6f1fe545e119\") " pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.313367 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.339214 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.339835 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.805716 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.924680 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.924949 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-central-agent" containerID="cri-o://29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7" gracePeriod=30 Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.925020 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="sg-core" containerID="cri-o://80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3" gracePeriod=30 Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.925134 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="proxy-httpd" containerID="cri-o://4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122" gracePeriod=30 Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.925384 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-notification-agent" containerID="cri-o://7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85" gracePeriod=30 Jan 30 21:52:28 crc kubenswrapper[4721]: I0130 21:52:28.948559 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b8e82608-6b61-42c4-b4fc-6f1fe545e119","Type":"ContainerStarted","Data":"078062ffd6de2b6029cdb3e5ed9ddd5afed910805b68e68752facea2dde302f1"} Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.422588 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.227:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.422612 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.227:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.961988 4721 generic.go:334] "Generic (PLEG): container finished" podID="247013ba-8786-402c-b3aa-30113e5001b4" containerID="4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122" exitCode=0 Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.962252 4721 generic.go:334] "Generic (PLEG): container finished" podID="247013ba-8786-402c-b3aa-30113e5001b4" 
containerID="80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3" exitCode=2 Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.962266 4721 generic.go:334] "Generic (PLEG): container finished" podID="247013ba-8786-402c-b3aa-30113e5001b4" containerID="29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7" exitCode=0 Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.962083 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerDied","Data":"4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122"} Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.962351 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerDied","Data":"80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3"} Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.962381 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerDied","Data":"29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7"} Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.964538 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b8e82608-6b61-42c4-b4fc-6f1fe545e119","Type":"ContainerStarted","Data":"2c36ae2d50adcedc2a45af46b691761f8c4800e6f794562cca6c5bcd4ec5fe94"} Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.964836 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 30 21:52:29 crc kubenswrapper[4721]: I0130 21:52:29.986725 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.582843472 podStartE2EDuration="2.986704458s" podCreationTimestamp="2026-01-30 21:52:27 +0000 UTC" firstStartedPulling="2026-01-30 21:52:28.791558616 +0000 UTC m=+2137.583459862" lastFinishedPulling="2026-01-30 21:52:29.195419602 +0000 UTC m=+2137.987320848" observedRunningTime="2026-01-30 21:52:29.983155198 +0000 UTC m=+2138.775056464" watchObservedRunningTime="2026-01-30 21:52:29.986704458 +0000 UTC m=+2138.778605734" Jan 30 21:52:30 crc kubenswrapper[4721]: I0130 21:52:30.211342 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 30 21:52:30 crc kubenswrapper[4721]: I0130 21:52:30.246899 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 30 21:52:31 crc kubenswrapper[4721]: I0130 21:52:31.016551 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 30 21:52:33 crc kubenswrapper[4721]: I0130 21:52:33.081470 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 21:52:33 crc kubenswrapper[4721]: I0130 21:52:33.086935 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 21:52:33 crc kubenswrapper[4721]: I0130 21:52:33.087956 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.026129 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 30 21:52:34 crc 
kubenswrapper[4721]: I0130 21:52:34.829619 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.962515 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pslv9\" (UniqueName: \"kubernetes.io/projected/247013ba-8786-402c-b3aa-30113e5001b4-kube-api-access-pslv9\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.962566 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-config-data\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.962605 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-log-httpd\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.962698 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-scripts\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.962843 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-combined-ca-bundle\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.962913 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-run-httpd\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.963020 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-sg-core-conf-yaml\") pod \"247013ba-8786-402c-b3aa-30113e5001b4\" (UID: \"247013ba-8786-402c-b3aa-30113e5001b4\") " Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.963553 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.963780 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.969140 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-scripts" (OuterVolumeSpecName: "scripts") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:34 crc kubenswrapper[4721]: I0130 21:52:34.969594 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/247013ba-8786-402c-b3aa-30113e5001b4-kube-api-access-pslv9" (OuterVolumeSpecName: "kube-api-access-pslv9") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "kube-api-access-pslv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.000389 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.037959 4721 generic.go:334] "Generic (PLEG): container finished" podID="247013ba-8786-402c-b3aa-30113e5001b4" containerID="7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85" exitCode=0 Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.039043 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.039666 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerDied","Data":"7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85"} Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.039708 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"247013ba-8786-402c-b3aa-30113e5001b4","Type":"ContainerDied","Data":"29833a64dfc8afbec9c8838448238e924cfcee57bc76452dc05327b65c8cfe77"} Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.039729 4721 scope.go:117] "RemoveContainer" containerID="4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.048142 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.065362 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pslv9\" (UniqueName: \"kubernetes.io/projected/247013ba-8786-402c-b3aa-30113e5001b4-kube-api-access-pslv9\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.065397 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.065410 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.065418 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.065428 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/247013ba-8786-402c-b3aa-30113e5001b4-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.065438 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.093815 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-config-data" (OuterVolumeSpecName: "config-data") pod "247013ba-8786-402c-b3aa-30113e5001b4" (UID: "247013ba-8786-402c-b3aa-30113e5001b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.101596 4721 scope.go:117] "RemoveContainer" containerID="80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.132921 4721 scope.go:117] "RemoveContainer" containerID="7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.158919 4721 scope.go:117] "RemoveContainer" containerID="29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.167132 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247013ba-8786-402c-b3aa-30113e5001b4-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.184394 4721 scope.go:117] "RemoveContainer" containerID="4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122" Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.185116 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122\": container with ID starting with 4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122 not found: ID does not exist" containerID="4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.185155 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122"} err="failed to get container status \"4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122\": rpc error: code = NotFound desc = could not find container \"4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122\": container with ID starting with 4a12efe136a572add571d28450e3bf0910f1d6fcded3511e72698fc9d8ffe122 not found: ID does not exist" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.185183 4721 scope.go:117] "RemoveContainer" containerID="80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3" Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.185814 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3\": container with ID starting with 80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3 not found: ID does not exist" containerID="80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.185853 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3"} err="failed to get container status \"80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3\": rpc error: code = NotFound desc = could not find container \"80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3\": container with ID starting with 80798bb3ed5f333378c7d37f99f0e9027550ba56d43a7354fbd5c2ceb9ff1ca3 not found: ID does not exist" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.185877 4721 scope.go:117] "RemoveContainer" containerID="7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85" Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.186348 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85"} err="failed to get container status \"7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85\": rpc error: code = NotFound desc = could not find container \"7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85\": container with ID starting with 7b501dda4fc24c9258be9326a97955478e4a6ea2b0c7af3e0b82a85c29d9bc85 not found: ID does not exist"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.186366 4721 scope.go:117] "RemoveContainer" containerID="29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7"
Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.186643 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7\": container with ID starting with 29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7 not found: ID does not exist" containerID="29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.186678 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7"} err="failed to get container status \"29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7\": rpc error: code = NotFound desc = could not find container \"29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7\": container with ID starting with 29db52924e875e87db24d006ba25d0d02118046f16016d80b202f5ab2cc79ce7 not found: ID does not exist"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.377369 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.390571 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.413721 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.414380 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-central-agent"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414403 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-central-agent"
Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.414451 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="proxy-httpd"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414462 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="proxy-httpd"
Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.414485 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-notification-agent"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414502 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-notification-agent"
Jan 30 21:52:35 crc kubenswrapper[4721]: E0130 21:52:35.414519 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="sg-core"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414534 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="sg-core"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414864 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-notification-agent"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414896 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="ceilometer-central-agent"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414909 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="proxy-httpd"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.414933 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="247013ba-8786-402c-b3aa-30113e5001b4" containerName="sg-core"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.417924 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.423274 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.423335 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.423655 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.423698 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.473808 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8nbk\" (UniqueName: \"kubernetes.io/projected/832234f1-e5b9-4d22-b368-dbbd15b58fda-kube-api-access-s8nbk\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.474088 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.474841 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-config-data\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.475008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-scripts\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.475238 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-run-httpd\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.475448 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.475710 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.475997 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-log-httpd\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578475 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578603 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-config-data\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578637 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-scripts\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578676 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-run-httpd\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578738 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578856 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.578921 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-log-httpd\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.579495 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-run-httpd\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.579574 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8nbk\" (UniqueName: \"kubernetes.io/projected/832234f1-e5b9-4d22-b368-dbbd15b58fda-kube-api-access-s8nbk\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.579617 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-log-httpd\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.584036 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.584164 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.584613 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-scripts\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.587164 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.587680 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-config-data\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0"
pod="openstack/ceilometer-0" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.611494 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8nbk\" (UniqueName: \"kubernetes.io/projected/832234f1-e5b9-4d22-b368-dbbd15b58fda-kube-api-access-s8nbk\") pod \"ceilometer-0\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " pod="openstack/ceilometer-0" Jan 30 21:52:35 crc kubenswrapper[4721]: I0130 21:52:35.749761 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:36 crc kubenswrapper[4721]: I0130 21:52:36.107373 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="247013ba-8786-402c-b3aa-30113e5001b4" path="/var/lib/kubelet/pods/247013ba-8786-402c-b3aa-30113e5001b4/volumes" Jan 30 21:52:36 crc kubenswrapper[4721]: I0130 21:52:36.244844 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:36 crc kubenswrapper[4721]: W0130 21:52:36.252913 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod832234f1_e5b9_4d22_b368_dbbd15b58fda.slice/crio-f1032b80a5b1510c1485a2e5b3ae3b5083c33e4a783161d14084b701b7d86b39 WatchSource:0}: Error finding container f1032b80a5b1510c1485a2e5b3ae3b5083c33e4a783161d14084b701b7d86b39: Status 404 returned error can't find the container with id f1032b80a5b1510c1485a2e5b3ae3b5083c33e4a783161d14084b701b7d86b39 Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.061949 4721 generic.go:334] "Generic (PLEG): container finished" podID="617fc783-10bd-4dce-b2ff-3d10bf6451a7" containerID="232bda3c110a7fd950109400b3a3c951da38b7594eefff4ba1a383e7c3f2ec64" exitCode=137 Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.061990 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"617fc783-10bd-4dce-b2ff-3d10bf6451a7","Type":"ContainerDied","Data":"232bda3c110a7fd950109400b3a3c951da38b7594eefff4ba1a383e7c3f2ec64"} Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.062559 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"617fc783-10bd-4dce-b2ff-3d10bf6451a7","Type":"ContainerDied","Data":"44cff1c51468ab208a110663c1b6207c100a7ce1f5f809f1243b1f1bdbaf0d63"} Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.062573 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44cff1c51468ab208a110663c1b6207c100a7ce1f5f809f1243b1f1bdbaf0d63" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.070976 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerStarted","Data":"f1032b80a5b1510c1485a2e5b3ae3b5083c33e4a783161d14084b701b7d86b39"} Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.107205 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.115502 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.179258 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.217877 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-combined-ca-bundle\") pod \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.217926 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-config-data\") pod \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.217962 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb4pb\" (UniqueName: \"kubernetes.io/projected/617fc783-10bd-4dce-b2ff-3d10bf6451a7-kube-api-access-mb4pb\") pod \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\" (UID: \"617fc783-10bd-4dce-b2ff-3d10bf6451a7\") " Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.222580 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/617fc783-10bd-4dce-b2ff-3d10bf6451a7-kube-api-access-mb4pb" (OuterVolumeSpecName: "kube-api-access-mb4pb") pod "617fc783-10bd-4dce-b2ff-3d10bf6451a7" (UID: "617fc783-10bd-4dce-b2ff-3d10bf6451a7"). InnerVolumeSpecName "kube-api-access-mb4pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.255673 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-config-data" (OuterVolumeSpecName: "config-data") pod "617fc783-10bd-4dce-b2ff-3d10bf6451a7" (UID: "617fc783-10bd-4dce-b2ff-3d10bf6451a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.264012 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "617fc783-10bd-4dce-b2ff-3d10bf6451a7" (UID: "617fc783-10bd-4dce-b2ff-3d10bf6451a7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.320837 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.321211 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617fc783-10bd-4dce-b2ff-3d10bf6451a7-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.321225 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb4pb\" (UniqueName: \"kubernetes.io/projected/617fc783-10bd-4dce-b2ff-3d10bf6451a7-kube-api-access-mb4pb\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:37 crc kubenswrapper[4721]: I0130 21:52:37.357471 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fxxh4"] Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.085039 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerStarted","Data":"ec5a129f22d25a177cb09ff95ef25de306eb511120ac2e69e2329316fbc9a826"} Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.085095 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.136831 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.158700 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.178225 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:38 crc kubenswrapper[4721]: E0130 21:52:38.178934 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="617fc783-10bd-4dce-b2ff-3d10bf6451a7" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.178961 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="617fc783-10bd-4dce-b2ff-3d10bf6451a7" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.179267 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="617fc783-10bd-4dce-b2ff-3d10bf6451a7" containerName="nova-cell1-novncproxy-novncproxy" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.180355 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.181986 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.183528 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.183575 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.191407 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.242323 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.242434 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.242530 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.242556 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2922t\" (UniqueName: \"kubernetes.io/projected/55612f9b-a463-4acf-9f8a-647372b6c4a0-kube-api-access-2922t\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.242587 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.334095 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.346484 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.346570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.346722 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.346754 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2922t\" (UniqueName: \"kubernetes.io/projected/55612f9b-a463-4acf-9f8a-647372b6c4a0-kube-api-access-2922t\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.346810 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.347822 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.347921 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.349917 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.349970 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.353082 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.389348 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.390510 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.394684 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.394859 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/55612f9b-a463-4acf-9f8a-647372b6c4a0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.399665 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.399697 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2922t\" (UniqueName: \"kubernetes.io/projected/55612f9b-a463-4acf-9f8a-647372b6c4a0-kube-api-access-2922t\") pod \"nova-cell1-novncproxy-0\" (UID: \"55612f9b-a463-4acf-9f8a-647372b6c4a0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.512244 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.615830 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-vclzn"] Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.617612 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.652384 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-vclzn"] Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.654683 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-config\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.654751 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.654798 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8b2t\" (UniqueName: \"kubernetes.io/projected/cad43101-ea31-4866-9692-3d0229454653-kube-api-access-f8b2t\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.654823 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.654845 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.654952 
4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.756820 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8b2t\" (UniqueName: \"kubernetes.io/projected/cad43101-ea31-4866-9692-3d0229454653-kube-api-access-f8b2t\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.757129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.757155 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.757275 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.757326 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-config\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.757360 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.758207 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.758579 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.758801 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.759154 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.759344 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-config\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.787655 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8b2t\" (UniqueName: \"kubernetes.io/projected/cad43101-ea31-4866-9692-3d0229454653-kube-api-access-f8b2t\") pod \"dnsmasq-dns-5fd9b586ff-vclzn\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:38 crc kubenswrapper[4721]: I0130 21:52:38.982414 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:39 crc kubenswrapper[4721]: I0130 21:52:39.114491 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerStarted","Data":"6536634af7054879c7bca4e5a1a377e14669d85218947aef6c40d90738ee3cea"} Jan 30 21:52:39 crc kubenswrapper[4721]: I0130 21:52:39.114895 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fxxh4" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="registry-server" containerID="cri-o://5300c6f90a0feee235708903cf9f9245d641faed35abf244962f941059a4771d" gracePeriod=2 Jan 30 21:52:39 crc kubenswrapper[4721]: I0130 21:52:39.200008 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 30 21:52:39 crc kubenswrapper[4721]: I0130 21:52:39.601910 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-vclzn"] Jan 30 21:52:39 crc kubenswrapper[4721]: W0130 21:52:39.629896 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcad43101_ea31_4866_9692_3d0229454653.slice/crio-e3707ab9ad6086a7c317cb5648d26180d0c5b6f6b3583cce260163c376e8f6f3 WatchSource:0}: Error finding container e3707ab9ad6086a7c317cb5648d26180d0c5b6f6b3583cce260163c376e8f6f3: Status 404 returned error can't find the container with id e3707ab9ad6086a7c317cb5648d26180d0c5b6f6b3583cce260163c376e8f6f3 Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.133989 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="617fc783-10bd-4dce-b2ff-3d10bf6451a7" path="/var/lib/kubelet/pods/617fc783-10bd-4dce-b2ff-3d10bf6451a7/volumes" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.173717 4721 generic.go:334] "Generic (PLEG): container finished" podID="7290b128-3568-44a1-ae57-7b89f3d9caeb" 
containerID="5300c6f90a0feee235708903cf9f9245d641faed35abf244962f941059a4771d" exitCode=0 Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.173781 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerDied","Data":"5300c6f90a0feee235708903cf9f9245d641faed35abf244962f941059a4771d"} Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.178494 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"55612f9b-a463-4acf-9f8a-647372b6c4a0","Type":"ContainerStarted","Data":"2c8829cfa47ba057d7b224c43510255cc24969c12cdd7ceed90bbb01ef0cecf8"} Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.178536 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"55612f9b-a463-4acf-9f8a-647372b6c4a0","Type":"ContainerStarted","Data":"e159d8e0e49e7edcd42ef078fff988d8be388997b787d8b5eb29a275f3b56c1f"} Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.183585 4721 generic.go:334] "Generic (PLEG): container finished" podID="cad43101-ea31-4866-9692-3d0229454653" containerID="c16bf7c51d0d28a3a7616f7dab46219c3261d656031e046b094026becb2a9924" exitCode=0 Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.183659 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" event={"ID":"cad43101-ea31-4866-9692-3d0229454653","Type":"ContainerDied","Data":"c16bf7c51d0d28a3a7616f7dab46219c3261d656031e046b094026becb2a9924"} Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.183683 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" event={"ID":"cad43101-ea31-4866-9692-3d0229454653","Type":"ContainerStarted","Data":"e3707ab9ad6086a7c317cb5648d26180d0c5b6f6b3583cce260163c376e8f6f3"} Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.236391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerStarted","Data":"23cff145f90e3ee2f1bfa6a9293307fb88350223988fa9fac052a23a817e4a7b"} Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.277378 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.277351678 podStartE2EDuration="2.277351678s" podCreationTimestamp="2026-01-30 21:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:40.216709737 +0000 UTC m=+2149.008610983" watchObservedRunningTime="2026-01-30 21:52:40.277351678 +0000 UTC m=+2149.069252924" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.505127 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.659269 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-utilities\") pod \"7290b128-3568-44a1-ae57-7b89f3d9caeb\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.659442 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz9h7\" (UniqueName: \"kubernetes.io/projected/7290b128-3568-44a1-ae57-7b89f3d9caeb-kube-api-access-jz9h7\") pod \"7290b128-3568-44a1-ae57-7b89f3d9caeb\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.659528 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-catalog-content\") pod \"7290b128-3568-44a1-ae57-7b89f3d9caeb\" (UID: \"7290b128-3568-44a1-ae57-7b89f3d9caeb\") " Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.660507 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-utilities" (OuterVolumeSpecName: "utilities") pod "7290b128-3568-44a1-ae57-7b89f3d9caeb" (UID: "7290b128-3568-44a1-ae57-7b89f3d9caeb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.666671 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7290b128-3568-44a1-ae57-7b89f3d9caeb-kube-api-access-jz9h7" (OuterVolumeSpecName: "kube-api-access-jz9h7") pod "7290b128-3568-44a1-ae57-7b89f3d9caeb" (UID: "7290b128-3568-44a1-ae57-7b89f3d9caeb"). InnerVolumeSpecName "kube-api-access-jz9h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.717771 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7290b128-3568-44a1-ae57-7b89f3d9caeb" (UID: "7290b128-3568-44a1-ae57-7b89f3d9caeb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.761764 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.761803 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz9h7\" (UniqueName: \"kubernetes.io/projected/7290b128-3568-44a1-ae57-7b89f3d9caeb-kube-api-access-jz9h7\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:40 crc kubenswrapper[4721]: I0130 21:52:40.761819 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7290b128-3568-44a1-ae57-7b89f3d9caeb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.253561 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" event={"ID":"cad43101-ea31-4866-9692-3d0229454653","Type":"ContainerStarted","Data":"ac6f85eb59f30f19803847a2b67db3f2961f88f4b0fe929d21ad455525873237"} Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.254056 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.257050 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxxh4" event={"ID":"7290b128-3568-44a1-ae57-7b89f3d9caeb","Type":"ContainerDied","Data":"4c8f0c90ea4f4e7db04ad1e348d4b7b6715c01482bc2e9732838e7529a70a8a6"} Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.257117 4721 scope.go:117] "RemoveContainer" containerID="5300c6f90a0feee235708903cf9f9245d641faed35abf244962f941059a4771d" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.257075 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fxxh4" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.286125 4721 scope.go:117] "RemoveContainer" containerID="729aed228ccbba9f8c91979120ee8d835b55db9326d2728f9f503b42885b1a7f" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.288385 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" podStartSLOduration=3.288360609 podStartE2EDuration="3.288360609s" podCreationTimestamp="2026-01-30 21:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:41.277568354 +0000 UTC m=+2150.069469600" watchObservedRunningTime="2026-01-30 21:52:41.288360609 +0000 UTC m=+2150.080261855" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.309075 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fxxh4"] Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.316450 4721 scope.go:117] "RemoveContainer" containerID="81aab703c587901332e14653191d70073ccd29a486dd8d208119a89e586417fa" Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.322450 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fxxh4"] Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.418746 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.419359 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-log" containerID="cri-o://de86b7ea147032730fe4558be12d8d2a71cece31f8e4b75a7ec6c87bb9982aee" gracePeriod=30 Jan 30 21:52:41 crc kubenswrapper[4721]: I0130 21:52:41.420083 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-api" containerID="cri-o://3f9c5efeb97e0d91a75e65b03c86420c097cb97903024a2137765ce6826e57cf" gracePeriod=30 Jan 30 21:52:42 crc kubenswrapper[4721]: I0130 21:52:42.105729 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" path="/var/lib/kubelet/pods/7290b128-3568-44a1-ae57-7b89f3d9caeb/volumes" Jan 30 21:52:42 crc kubenswrapper[4721]: I0130 21:52:42.220529 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:42 crc kubenswrapper[4721]: I0130 21:52:42.271879 4721 generic.go:334] "Generic (PLEG): container finished" podID="920aadff-0704-437f-89e7-bae6b121727e" containerID="de86b7ea147032730fe4558be12d8d2a71cece31f8e4b75a7ec6c87bb9982aee" exitCode=143 Jan 30 21:52:42 crc kubenswrapper[4721]: I0130 21:52:42.271998 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"920aadff-0704-437f-89e7-bae6b121727e","Type":"ContainerDied","Data":"de86b7ea147032730fe4558be12d8d2a71cece31f8e4b75a7ec6c87bb9982aee"} Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.301865 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerStarted","Data":"ce4062a5b3961756aa153ba5941ae287688b30f9e5708397f7a5cd5719191d9a"} Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.304810 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/ceilometer-0" Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.302110 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="sg-core" containerID="cri-o://23cff145f90e3ee2f1bfa6a9293307fb88350223988fa9fac052a23a817e4a7b" gracePeriod=30 Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.302142 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-central-agent" containerID="cri-o://ec5a129f22d25a177cb09ff95ef25de306eb511120ac2e69e2329316fbc9a826" gracePeriod=30 Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.302191 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-notification-agent" containerID="cri-o://6536634af7054879c7bca4e5a1a377e14669d85218947aef6c40d90738ee3cea" gracePeriod=30 Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.302099 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="proxy-httpd" containerID="cri-o://ce4062a5b3961756aa153ba5941ae287688b30f9e5708397f7a5cd5719191d9a" gracePeriod=30 Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.327867 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.167259923 podStartE2EDuration="8.327848912s" podCreationTimestamp="2026-01-30 21:52:35 +0000 UTC" firstStartedPulling="2026-01-30 21:52:36.256640117 +0000 UTC m=+2145.048541363" lastFinishedPulling="2026-01-30 21:52:42.417229106 +0000 UTC m=+2151.209130352" observedRunningTime="2026-01-30 21:52:43.327545562 +0000 UTC m=+2152.119446808" watchObservedRunningTime="2026-01-30 21:52:43.327848912 +0000 UTC m=+2152.119750158" Jan 30 21:52:43 crc kubenswrapper[4721]: I0130 21:52:43.513647 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.319730 4721 generic.go:334] "Generic (PLEG): container finished" podID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerID="ce4062a5b3961756aa153ba5941ae287688b30f9e5708397f7a5cd5719191d9a" exitCode=0 Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.320390 4721 generic.go:334] "Generic (PLEG): container finished" podID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerID="23cff145f90e3ee2f1bfa6a9293307fb88350223988fa9fac052a23a817e4a7b" exitCode=2 Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.319794 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerDied","Data":"ce4062a5b3961756aa153ba5941ae287688b30f9e5708397f7a5cd5719191d9a"} Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.320450 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerDied","Data":"23cff145f90e3ee2f1bfa6a9293307fb88350223988fa9fac052a23a817e4a7b"} Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.320469 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerDied","Data":"6536634af7054879c7bca4e5a1a377e14669d85218947aef6c40d90738ee3cea"} Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.320405 4721 generic.go:334] "Generic (PLEG): container finished" podID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerID="6536634af7054879c7bca4e5a1a377e14669d85218947aef6c40d90738ee3cea" exitCode=0 Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.774836 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zztpc"] Jan 30 21:52:44 crc kubenswrapper[4721]: E0130 21:52:44.775386 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="registry-server" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.775401 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="registry-server" Jan 30 21:52:44 crc kubenswrapper[4721]: E0130 21:52:44.775451 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="extract-utilities" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.775460 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="extract-utilities" Jan 30 21:52:44 crc kubenswrapper[4721]: E0130 21:52:44.775483 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="extract-content" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.775492 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="extract-content" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.775749 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7290b128-3568-44a1-ae57-7b89f3d9caeb" containerName="registry-server" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.777688 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.795871 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zztpc"] Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.860667 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-utilities\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.860948 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-catalog-content\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.860977 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mxk6\" (UniqueName: \"kubernetes.io/projected/3b94fabe-19f8-485d-9b4d-592dd9f4e706-kube-api-access-5mxk6\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.963064 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-utilities\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.963225 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-catalog-content\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.963249 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mxk6\" (UniqueName: \"kubernetes.io/projected/3b94fabe-19f8-485d-9b4d-592dd9f4e706-kube-api-access-5mxk6\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.963754 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-utilities\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.963774 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-catalog-content\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:44 crc kubenswrapper[4721]: I0130 21:52:44.981479 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5mxk6\" (UniqueName: \"kubernetes.io/projected/3b94fabe-19f8-485d-9b4d-592dd9f4e706-kube-api-access-5mxk6\") pod \"redhat-marketplace-zztpc\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:45 crc kubenswrapper[4721]: I0130 21:52:45.114593 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:45 crc kubenswrapper[4721]: I0130 21:52:45.346808 4721 generic.go:334] "Generic (PLEG): container finished" podID="920aadff-0704-437f-89e7-bae6b121727e" containerID="3f9c5efeb97e0d91a75e65b03c86420c097cb97903024a2137765ce6826e57cf" exitCode=0 Jan 30 21:52:45 crc kubenswrapper[4721]: I0130 21:52:45.346869 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"920aadff-0704-437f-89e7-bae6b121727e","Type":"ContainerDied","Data":"3f9c5efeb97e0d91a75e65b03c86420c097cb97903024a2137765ce6826e57cf"} Jan 30 21:52:45 crc kubenswrapper[4721]: I0130 21:52:45.362065 4721 generic.go:334] "Generic (PLEG): container finished" podID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerID="ec5a129f22d25a177cb09ff95ef25de306eb511120ac2e69e2329316fbc9a826" exitCode=0 Jan 30 21:52:45 crc kubenswrapper[4721]: I0130 21:52:45.362107 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerDied","Data":"ec5a129f22d25a177cb09ff95ef25de306eb511120ac2e69e2329316fbc9a826"} Jan 30 21:52:45 crc kubenswrapper[4721]: I0130 21:52:45.635951 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zztpc"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.006648 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.018025 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087601 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-scripts\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087698 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-combined-ca-bundle\") pod \"920aadff-0704-437f-89e7-bae6b121727e\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087759 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-ceilometer-tls-certs\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087824 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-sg-core-conf-yaml\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087863 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-run-httpd\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087882 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-combined-ca-bundle\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.087930 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-config-data\") pod \"920aadff-0704-437f-89e7-bae6b121727e\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.088000 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhlrf\" (UniqueName: \"kubernetes.io/projected/920aadff-0704-437f-89e7-bae6b121727e-kube-api-access-dhlrf\") pod \"920aadff-0704-437f-89e7-bae6b121727e\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.088056 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/920aadff-0704-437f-89e7-bae6b121727e-logs\") pod \"920aadff-0704-437f-89e7-bae6b121727e\" (UID: \"920aadff-0704-437f-89e7-bae6b121727e\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.088091 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-log-httpd\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: 
\"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.088121 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-config-data\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.088144 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8nbk\" (UniqueName: \"kubernetes.io/projected/832234f1-e5b9-4d22-b368-dbbd15b58fda-kube-api-access-s8nbk\") pod \"832234f1-e5b9-4d22-b368-dbbd15b58fda\" (UID: \"832234f1-e5b9-4d22-b368-dbbd15b58fda\") " Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.089747 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.090015 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.090202 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/920aadff-0704-437f-89e7-bae6b121727e-logs" (OuterVolumeSpecName: "logs") pod "920aadff-0704-437f-89e7-bae6b121727e" (UID: "920aadff-0704-437f-89e7-bae6b121727e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.097464 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/920aadff-0704-437f-89e7-bae6b121727e-kube-api-access-dhlrf" (OuterVolumeSpecName: "kube-api-access-dhlrf") pod "920aadff-0704-437f-89e7-bae6b121727e" (UID: "920aadff-0704-437f-89e7-bae6b121727e"). InnerVolumeSpecName "kube-api-access-dhlrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.101526 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-scripts" (OuterVolumeSpecName: "scripts") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.107608 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/832234f1-e5b9-4d22-b368-dbbd15b58fda-kube-api-access-s8nbk" (OuterVolumeSpecName: "kube-api-access-s8nbk") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "kube-api-access-s8nbk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.186328 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-config-data" (OuterVolumeSpecName: "config-data") pod "920aadff-0704-437f-89e7-bae6b121727e" (UID: "920aadff-0704-437f-89e7-bae6b121727e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.193654 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhlrf\" (UniqueName: \"kubernetes.io/projected/920aadff-0704-437f-89e7-bae6b121727e-kube-api-access-dhlrf\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.193985 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/920aadff-0704-437f-89e7-bae6b121727e-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.193998 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.194013 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8nbk\" (UniqueName: \"kubernetes.io/projected/832234f1-e5b9-4d22-b368-dbbd15b58fda-kube-api-access-s8nbk\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.194022 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.194032 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/832234f1-e5b9-4d22-b368-dbbd15b58fda-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.194043 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.204987 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.230212 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "920aadff-0704-437f-89e7-bae6b121727e" (UID: "920aadff-0704-437f-89e7-bae6b121727e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.232983 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.262766 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-config-data" (OuterVolumeSpecName: "config-data") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.290689 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "832234f1-e5b9-4d22-b368-dbbd15b58fda" (UID: "832234f1-e5b9-4d22-b368-dbbd15b58fda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.296750 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.296799 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.296812 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920aadff-0704-437f-89e7-bae6b121727e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.296834 4721 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.296846 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/832234f1-e5b9-4d22-b368-dbbd15b58fda-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.376253 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"920aadff-0704-437f-89e7-bae6b121727e","Type":"ContainerDied","Data":"e3c9c1883a96f1fe7becc2c97cb9dbe630ac9c5a9c6e3d35d447262c6c8194c7"} Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.376418 4721 scope.go:117] "RemoveContainer" containerID="3f9c5efeb97e0d91a75e65b03c86420c097cb97903024a2137765ce6826e57cf" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.376856 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.383276 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"832234f1-e5b9-4d22-b368-dbbd15b58fda","Type":"ContainerDied","Data":"f1032b80a5b1510c1485a2e5b3ae3b5083c33e4a783161d14084b701b7d86b39"} Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.383438 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.396064 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zztpc" event={"ID":"3b94fabe-19f8-485d-9b4d-592dd9f4e706","Type":"ContainerStarted","Data":"50695f8b7f9a047282defcb5001b6ae5b7a50b80c7c998539723f928cda41cfa"} Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.407819 4721 scope.go:117] "RemoveContainer" containerID="de86b7ea147032730fe4558be12d8d2a71cece31f8e4b75a7ec6c87bb9982aee" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.427279 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.448456 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.463374 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.464587 4721 scope.go:117] "RemoveContainer" containerID="ce4062a5b3961756aa153ba5941ae287688b30f9e5708397f7a5cd5719191d9a" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.481380 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500394 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: E0130 21:52:46.500813 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-central-agent" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500831 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-central-agent" Jan 30 21:52:46 crc kubenswrapper[4721]: E0130 21:52:46.500851 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="sg-core" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500858 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="sg-core" Jan 30 21:52:46 crc kubenswrapper[4721]: E0130 21:52:46.500873 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-api" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500880 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-api" Jan 30 21:52:46 crc kubenswrapper[4721]: E0130 21:52:46.500900 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-notification-agent" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500906 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-notification-agent" Jan 30 
21:52:46 crc kubenswrapper[4721]: E0130 21:52:46.500917 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-log" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500924 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-log" Jan 30 21:52:46 crc kubenswrapper[4721]: E0130 21:52:46.500937 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="proxy-httpd" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.500942 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="proxy-httpd" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.501134 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="proxy-httpd" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.501144 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-central-agent" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.501154 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="sg-core" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.501168 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-api" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.501184 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" containerName="ceilometer-notification-agent" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.501193 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="920aadff-0704-437f-89e7-bae6b121727e" containerName="nova-api-log" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.502569 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.507535 4721 scope.go:117] "RemoveContainer" containerID="23cff145f90e3ee2f1bfa6a9293307fb88350223988fa9fac052a23a817e4a7b" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.507693 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.507730 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.507818 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.530893 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.533412 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.537107 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.537383 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.537999 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.543621 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.546278 4721 scope.go:117] "RemoveContainer" containerID="6536634af7054879c7bca4e5a1a377e14669d85218947aef6c40d90738ee3cea" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.555231 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.577345 4721 scope.go:117] "RemoveContainer" containerID="ec5a129f22d25a177cb09ff95ef25de306eb511120ac2e69e2329316fbc9a826" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.604597 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-config-data\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.604697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.604816 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-log-httpd\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.604913 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ltgz\" (UniqueName: \"kubernetes.io/projected/b1f26399-4d0d-4ec2-a777-21c0fff8d509-kube-api-access-4ltgz\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-scripts\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605076 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-config-data\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605152 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605202 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605223 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-public-tls-certs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605246 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mthtg\" (UniqueName: \"kubernetes.io/projected/391f567e-1d28-4e02-8490-799e10ed88c3-kube-api-access-mthtg\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605277 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605322 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1f26399-4d0d-4ec2-a777-21c0fff8d509-logs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605390 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-run-httpd\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.605470 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.706813 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.706862 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-log-httpd\") pod \"ceilometer-0\" (UID: 
\"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.706899 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ltgz\" (UniqueName: \"kubernetes.io/projected/b1f26399-4d0d-4ec2-a777-21c0fff8d509-kube-api-access-4ltgz\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.706931 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-scripts\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.706956 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-config-data\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.706984 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707012 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707027 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-public-tls-certs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mthtg\" (UniqueName: \"kubernetes.io/projected/391f567e-1d28-4e02-8490-799e10ed88c3-kube-api-access-mthtg\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707073 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707095 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1f26399-4d0d-4ec2-a777-21c0fff8d509-logs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707141 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-run-httpd\") pod \"ceilometer-0\" (UID: 
\"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707195 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-config-data\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707443 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-log-httpd\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707885 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1f26399-4d0d-4ec2-a777-21c0fff8d509-logs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.707728 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-run-httpd\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.712477 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-config-data\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.712753 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.713004 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.714508 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-config-data\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.717478 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-public-tls-certs\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: 
I0130 21:52:46.717954 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.718663 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.721159 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-scripts\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.729239 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ltgz\" (UniqueName: \"kubernetes.io/projected/b1f26399-4d0d-4ec2-a777-21c0fff8d509-kube-api-access-4ltgz\") pod \"nova-api-0\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.733234 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.739021 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mthtg\" (UniqueName: \"kubernetes.io/projected/391f567e-1d28-4e02-8490-799e10ed88c3-kube-api-access-mthtg\") pod \"ceilometer-0\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " pod="openstack/ceilometer-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.826127 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:52:46 crc kubenswrapper[4721]: I0130 21:52:46.857575 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:52:47 crc kubenswrapper[4721]: W0130 21:52:47.358460 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1f26399_4d0d_4ec2_a777_21c0fff8d509.slice/crio-344a6fa36b2cb325b665f0bcfee0ebe79c86a5d19fa2a067110b29f3543aada5 WatchSource:0}: Error finding container 344a6fa36b2cb325b665f0bcfee0ebe79c86a5d19fa2a067110b29f3543aada5: Status 404 returned error can't find the container with id 344a6fa36b2cb325b665f0bcfee0ebe79c86a5d19fa2a067110b29f3543aada5 Jan 30 21:52:47 crc kubenswrapper[4721]: I0130 21:52:47.371949 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:52:47 crc kubenswrapper[4721]: I0130 21:52:47.408316 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1f26399-4d0d-4ec2-a777-21c0fff8d509","Type":"ContainerStarted","Data":"344a6fa36b2cb325b665f0bcfee0ebe79c86a5d19fa2a067110b29f3543aada5"} Jan 30 21:52:47 crc kubenswrapper[4721]: I0130 21:52:47.410145 4721 generic.go:334] "Generic (PLEG): container finished" podID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerID="b609aabcbf96056f02aef72f22c8ef8027cf9a245cdf447fb6b26703e02e4b5f" exitCode=0 Jan 30 21:52:47 crc kubenswrapper[4721]: I0130 21:52:47.410217 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zztpc" event={"ID":"3b94fabe-19f8-485d-9b4d-592dd9f4e706","Type":"ContainerDied","Data":"b609aabcbf96056f02aef72f22c8ef8027cf9a245cdf447fb6b26703e02e4b5f"} Jan 30 21:52:47 crc kubenswrapper[4721]: I0130 21:52:47.480690 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:52:47 crc kubenswrapper[4721]: W0130 21:52:47.486605 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod391f567e_1d28_4e02_8490_799e10ed88c3.slice/crio-963717bc9854f3f4075a63cac27497e43a06339d9731557a5e1602fa64a9abdb WatchSource:0}: Error finding container 963717bc9854f3f4075a63cac27497e43a06339d9731557a5e1602fa64a9abdb: Status 404 returned error can't find the container with id 963717bc9854f3f4075a63cac27497e43a06339d9731557a5e1602fa64a9abdb Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.111216 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="832234f1-e5b9-4d22-b368-dbbd15b58fda" path="/var/lib/kubelet/pods/832234f1-e5b9-4d22-b368-dbbd15b58fda/volumes" Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.113193 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="920aadff-0704-437f-89e7-bae6b121727e" path="/var/lib/kubelet/pods/920aadff-0704-437f-89e7-bae6b121727e/volumes" Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.427407 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerStarted","Data":"963717bc9854f3f4075a63cac27497e43a06339d9731557a5e1602fa64a9abdb"} Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.429080 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1f26399-4d0d-4ec2-a777-21c0fff8d509","Type":"ContainerStarted","Data":"139ce157571dd497f3e623e750eb27e61eca7d2bb430d6c844ef92cdc1e94078"} Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.429111 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"b1f26399-4d0d-4ec2-a777-21c0fff8d509","Type":"ContainerStarted","Data":"c53da16978f0d2ee975b46a677e71f11af09afb226c2348398de3807af7e56ca"} Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.449734 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.44971957 podStartE2EDuration="2.44971957s" podCreationTimestamp="2026-01-30 21:52:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:48.44746696 +0000 UTC m=+2157.239368206" watchObservedRunningTime="2026-01-30 21:52:48.44971957 +0000 UTC m=+2157.241620816" Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.513477 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.537061 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:48 crc kubenswrapper[4721]: I0130 21:52:48.984915 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.071823 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zrv56"] Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.072668 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78cd565959-zrv56" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="dnsmasq-dns" containerID="cri-o://8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861" gracePeriod=10 Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.470670 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.672206 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-46g5v"] Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.674993 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.678725 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.678975 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.693468 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-46g5v"] Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.782563 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-config-data\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.782752 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d922v\" (UniqueName: \"kubernetes.io/projected/f511802d-cde5-4900-8a57-b06ebf1bab3b-kube-api-access-d922v\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.782920 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-scripts\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.783027 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.885244 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.885343 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-config-data\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.885412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d922v\" (UniqueName: \"kubernetes.io/projected/f511802d-cde5-4900-8a57-b06ebf1bab3b-kube-api-access-d922v\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.885480 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-scripts\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.903343 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.905586 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d922v\" (UniqueName: \"kubernetes.io/projected/f511802d-cde5-4900-8a57-b06ebf1bab3b-kube-api-access-d922v\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.906416 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-scripts\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:49 crc kubenswrapper[4721]: I0130 21:52:49.910920 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-config-data\") pod \"nova-cell1-cell-mapping-46g5v\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") " pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.010486 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.289652 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zrv56" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.394752 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-svc\") pod \"ced291de-0920-46ed-a3e0-2c064b072df0\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.394955 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-sb\") pod \"ced291de-0920-46ed-a3e0-2c064b072df0\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.394989 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qmr9\" (UniqueName: \"kubernetes.io/projected/ced291de-0920-46ed-a3e0-2c064b072df0-kube-api-access-5qmr9\") pod \"ced291de-0920-46ed-a3e0-2c064b072df0\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.395077 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-swift-storage-0\") pod \"ced291de-0920-46ed-a3e0-2c064b072df0\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.395100 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-config\") pod \"ced291de-0920-46ed-a3e0-2c064b072df0\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.395126 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-nb\") pod \"ced291de-0920-46ed-a3e0-2c064b072df0\" (UID: \"ced291de-0920-46ed-a3e0-2c064b072df0\") " Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.404620 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced291de-0920-46ed-a3e0-2c064b072df0-kube-api-access-5qmr9" (OuterVolumeSpecName: "kube-api-access-5qmr9") pod "ced291de-0920-46ed-a3e0-2c064b072df0" (UID: "ced291de-0920-46ed-a3e0-2c064b072df0"). InnerVolumeSpecName "kube-api-access-5qmr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.463406 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ced291de-0920-46ed-a3e0-2c064b072df0" (UID: "ced291de-0920-46ed-a3e0-2c064b072df0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.467275 4721 generic.go:334] "Generic (PLEG): container finished" podID="ced291de-0920-46ed-a3e0-2c064b072df0" containerID="8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861" exitCode=0 Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.467418 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zrv56" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.467339 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zrv56" event={"ID":"ced291de-0920-46ed-a3e0-2c064b072df0","Type":"ContainerDied","Data":"8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861"} Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.467518 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zrv56" event={"ID":"ced291de-0920-46ed-a3e0-2c064b072df0","Type":"ContainerDied","Data":"86f9ea9e98edfe17cc068e9209fc47c5f1e39a61e14b7076e6b67b48769b23fa"} Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.467538 4721 scope.go:117] "RemoveContainer" containerID="8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.474168 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerStarted","Data":"d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad"} Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.474197 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ced291de-0920-46ed-a3e0-2c064b072df0" (UID: "ced291de-0920-46ed-a3e0-2c064b072df0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.506245 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.506484 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ced291de-0920-46ed-a3e0-2c064b072df0" (UID: "ced291de-0920-46ed-a3e0-2c064b072df0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.506502 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.506626 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qmr9\" (UniqueName: \"kubernetes.io/projected/ced291de-0920-46ed-a3e0-2c064b072df0-kube-api-access-5qmr9\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.517873 4721 scope.go:117] "RemoveContainer" containerID="6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.518861 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-config" (OuterVolumeSpecName: "config") pod "ced291de-0920-46ed-a3e0-2c064b072df0" (UID: "ced291de-0920-46ed-a3e0-2c064b072df0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.525689 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ced291de-0920-46ed-a3e0-2c064b072df0" (UID: "ced291de-0920-46ed-a3e0-2c064b072df0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.542570 4721 scope.go:117] "RemoveContainer" containerID="8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861" Jan 30 21:52:50 crc kubenswrapper[4721]: E0130 21:52:50.545251 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861\": container with ID starting with 8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861 not found: ID does not exist" containerID="8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.545356 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861"} err="failed to get container status \"8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861\": rpc error: code = NotFound desc = could not find container \"8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861\": container with ID starting with 8f93ee3ca62d414576f92764ed0666b41dce8eb6eaff97e8be829c5e0d3df861 not found: ID does not exist" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.545382 4721 scope.go:117] "RemoveContainer" containerID="6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b" Jan 30 21:52:50 crc kubenswrapper[4721]: E0130 21:52:50.548565 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b\": container with ID starting with 6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b not found: ID does not exist" containerID="6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.548625 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b"} err="failed to get container status \"6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b\": rpc error: code = NotFound desc = could not find container \"6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b\": container with ID starting with 6587369fdbfcd28d5a216ed14a5bec9fecfb2a03cf048ea09256d5717ae5593b not found: ID does not exist" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.611750 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.611795 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:50 crc 
kubenswrapper[4721]: I0130 21:52:50.611809 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ced291de-0920-46ed-a3e0-2c064b072df0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.645235 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-46g5v"] Jan 30 21:52:50 crc kubenswrapper[4721]: W0130 21:52:50.659946 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf511802d_cde5_4900_8a57_b06ebf1bab3b.slice/crio-d263f7289fec8b5e70de8ecbc0695297714d6de11370fe85f62d1460310d479f WatchSource:0}: Error finding container d263f7289fec8b5e70de8ecbc0695297714d6de11370fe85f62d1460310d479f: Status 404 returned error can't find the container with id d263f7289fec8b5e70de8ecbc0695297714d6de11370fe85f62d1460310d479f Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.906635 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zrv56"] Jan 30 21:52:50 crc kubenswrapper[4721]: I0130 21:52:50.918584 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zrv56"] Jan 30 21:52:51 crc kubenswrapper[4721]: I0130 21:52:51.486380 4721 generic.go:334] "Generic (PLEG): container finished" podID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerID="02a489a281d178eab40a06dacd76eb7c2008f5ada292fc60bcb35d511637d4de" exitCode=0 Jan 30 21:52:51 crc kubenswrapper[4721]: I0130 21:52:51.486461 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zztpc" event={"ID":"3b94fabe-19f8-485d-9b4d-592dd9f4e706","Type":"ContainerDied","Data":"02a489a281d178eab40a06dacd76eb7c2008f5ada292fc60bcb35d511637d4de"} Jan 30 21:52:51 crc kubenswrapper[4721]: I0130 21:52:51.488337 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-46g5v" event={"ID":"f511802d-cde5-4900-8a57-b06ebf1bab3b","Type":"ContainerStarted","Data":"04beb6d06e4ab9cdeef1d0239db123ec2fffe6856309c35ed9c462e9c8b8d282"} Jan 30 21:52:51 crc kubenswrapper[4721]: I0130 21:52:51.488379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-46g5v" event={"ID":"f511802d-cde5-4900-8a57-b06ebf1bab3b","Type":"ContainerStarted","Data":"d263f7289fec8b5e70de8ecbc0695297714d6de11370fe85f62d1460310d479f"} Jan 30 21:52:51 crc kubenswrapper[4721]: I0130 21:52:51.538348 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-46g5v" podStartSLOduration=2.538322257 podStartE2EDuration="2.538322257s" podCreationTimestamp="2026-01-30 21:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:52:51.532761714 +0000 UTC m=+2160.324662960" watchObservedRunningTime="2026-01-30 21:52:51.538322257 +0000 UTC m=+2160.330223503" Jan 30 21:52:52 crc kubenswrapper[4721]: I0130 21:52:52.117467 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" path="/var/lib/kubelet/pods/ced291de-0920-46ed-a3e0-2c064b072df0/volumes" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.099361 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4n25s"] Jan 30 21:52:53 crc kubenswrapper[4721]: E0130 21:52:53.100534 4721 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="init" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.100552 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="init" Jan 30 21:52:53 crc kubenswrapper[4721]: E0130 21:52:53.100594 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="dnsmasq-dns" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.100602 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="dnsmasq-dns" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.100827 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="dnsmasq-dns" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.106052 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.112345 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4n25s"] Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.186497 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvxkg\" (UniqueName: \"kubernetes.io/projected/bcfba918-3cbe-431d-95e4-7ad611a264bd-kube-api-access-qvxkg\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.186618 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-catalog-content\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.186655 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-utilities\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.299028 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvxkg\" (UniqueName: \"kubernetes.io/projected/bcfba918-3cbe-431d-95e4-7ad611a264bd-kube-api-access-qvxkg\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.299155 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-catalog-content\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.299196 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-utilities\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.299780 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-utilities\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.299843 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-catalog-content\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.332169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvxkg\" (UniqueName: \"kubernetes.io/projected/bcfba918-3cbe-431d-95e4-7ad611a264bd-kube-api-access-qvxkg\") pod \"certified-operators-4n25s\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.428752 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.514320 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerStarted","Data":"683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84"} Jan 30 21:52:53 crc kubenswrapper[4721]: I0130 21:52:53.995995 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4n25s"] Jan 30 21:52:54 crc kubenswrapper[4721]: W0130 21:52:54.010443 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcfba918_3cbe_431d_95e4_7ad611a264bd.slice/crio-5a1ef4e587b69b2559199561a1a2b42a188bf8c8aa1ac4c49a3777a86711325d WatchSource:0}: Error finding container 5a1ef4e587b69b2559199561a1a2b42a188bf8c8aa1ac4c49a3777a86711325d: Status 404 returned error can't find the container with id 5a1ef4e587b69b2559199561a1a2b42a188bf8c8aa1ac4c49a3777a86711325d Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.526831 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zztpc" event={"ID":"3b94fabe-19f8-485d-9b4d-592dd9f4e706","Type":"ContainerStarted","Data":"c0ccd86017f4df4c05d74b1a46ee05a0b2a9953d13c20acc4da4a23d4365c104"} Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.528938 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerID="1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186" exitCode=0 Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.529046 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerDied","Data":"1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186"} Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.529077 4721 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerStarted","Data":"5a1ef4e587b69b2559199561a1a2b42a188bf8c8aa1ac4c49a3777a86711325d"} Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.533051 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerStarted","Data":"0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67"} Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.553823 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zztpc" podStartSLOduration=4.631143436 podStartE2EDuration="10.553800234s" podCreationTimestamp="2026-01-30 21:52:44 +0000 UTC" firstStartedPulling="2026-01-30 21:52:47.412220687 +0000 UTC m=+2156.204121943" lastFinishedPulling="2026-01-30 21:52:53.334877495 +0000 UTC m=+2162.126778741" observedRunningTime="2026-01-30 21:52:54.545649302 +0000 UTC m=+2163.337550558" watchObservedRunningTime="2026-01-30 21:52:54.553800234 +0000 UTC m=+2163.345701490" Jan 30 21:52:54 crc kubenswrapper[4721]: I0130 21:52:54.962448 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78cd565959-zrv56" podUID="ced291de-0920-46ed-a3e0-2c064b072df0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.220:5353: i/o timeout" Jan 30 21:52:55 crc kubenswrapper[4721]: I0130 21:52:55.115243 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:55 crc kubenswrapper[4721]: I0130 21:52:55.115500 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:52:56 crc kubenswrapper[4721]: I0130 21:52:56.179672 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-zztpc" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="registry-server" probeResult="failure" output=< Jan 30 21:52:56 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:52:56 crc kubenswrapper[4721]: > Jan 30 21:52:56 crc kubenswrapper[4721]: I0130 21:52:56.827026 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:52:56 crc kubenswrapper[4721]: I0130 21:52:56.827657 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.263772 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cb74w"] Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.265910 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.285657 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cb74w"] Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.386588 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-utilities\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.386767 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbm7z\" (UniqueName: \"kubernetes.io/projected/47c095fd-4cfd-4518-9b5d-e71838d800a7-kube-api-access-vbm7z\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.387012 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-catalog-content\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.488964 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-utilities\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.489084 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbm7z\" (UniqueName: \"kubernetes.io/projected/47c095fd-4cfd-4518-9b5d-e71838d800a7-kube-api-access-vbm7z\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.489161 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-catalog-content\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.489499 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-utilities\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.489594 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-catalog-content\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.512922 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vbm7z\" (UniqueName: \"kubernetes.io/projected/47c095fd-4cfd-4518-9b5d-e71838d800a7-kube-api-access-vbm7z\") pod \"redhat-operators-cb74w\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.621707 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.848624 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.234:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:57 crc kubenswrapper[4721]: I0130 21:52:57.848670 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.234:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:52:58 crc kubenswrapper[4721]: I0130 21:52:58.238771 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cb74w"] Jan 30 21:52:58 crc kubenswrapper[4721]: I0130 21:52:58.577285 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerStarted","Data":"f44c26bf727976f625f7608d24526418d394dded8dac463e6122c86d4d60feb4"} Jan 30 21:52:58 crc kubenswrapper[4721]: I0130 21:52:58.579196 4721 generic.go:334] "Generic (PLEG): container finished" podID="f511802d-cde5-4900-8a57-b06ebf1bab3b" containerID="04beb6d06e4ab9cdeef1d0239db123ec2fffe6856309c35ed9c462e9c8b8d282" exitCode=0 Jan 30 21:52:58 crc kubenswrapper[4721]: I0130 21:52:58.579249 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-46g5v" event={"ID":"f511802d-cde5-4900-8a57-b06ebf1bab3b","Type":"ContainerDied","Data":"04beb6d06e4ab9cdeef1d0239db123ec2fffe6856309c35ed9c462e9c8b8d282"} Jan 30 21:52:59 crc kubenswrapper[4721]: I0130 21:52:59.449234 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:52:59 crc kubenswrapper[4721]: I0130 21:52:59.449558 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:52:59 crc kubenswrapper[4721]: I0130 21:52:59.594908 4721 generic.go:334] "Generic (PLEG): container finished" podID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerID="8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7" exitCode=0 Jan 30 21:52:59 crc kubenswrapper[4721]: I0130 21:52:59.597185 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerDied","Data":"8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7"} 
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.449287 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-46g5v"
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.578250 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d922v\" (UniqueName: \"kubernetes.io/projected/f511802d-cde5-4900-8a57-b06ebf1bab3b-kube-api-access-d922v\") pod \"f511802d-cde5-4900-8a57-b06ebf1bab3b\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") "
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.578934 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-config-data\") pod \"f511802d-cde5-4900-8a57-b06ebf1bab3b\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") "
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.578975 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-scripts\") pod \"f511802d-cde5-4900-8a57-b06ebf1bab3b\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") "
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.579186 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-combined-ca-bundle\") pod \"f511802d-cde5-4900-8a57-b06ebf1bab3b\" (UID: \"f511802d-cde5-4900-8a57-b06ebf1bab3b\") "
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.586046 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-scripts" (OuterVolumeSpecName: "scripts") pod "f511802d-cde5-4900-8a57-b06ebf1bab3b" (UID: "f511802d-cde5-4900-8a57-b06ebf1bab3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.593455 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f511802d-cde5-4900-8a57-b06ebf1bab3b-kube-api-access-d922v" (OuterVolumeSpecName: "kube-api-access-d922v") pod "f511802d-cde5-4900-8a57-b06ebf1bab3b" (UID: "f511802d-cde5-4900-8a57-b06ebf1bab3b"). InnerVolumeSpecName "kube-api-access-d922v". PluginName "kubernetes.io/projected", VolumeGidValue ""
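The teardown of nova-cell1-cell-mapping-46g5v above follows the volume manager's fixed sequence for a deleted pod: reconciler_common.go:159 starts UnmountVolume for each mounted volume, operation_generator.go:803 reports UnmountVolume.TearDown succeeded, and reconciler_common.go:293 finally logs "Volume detached" once the volume leaves the actual state of the world. A quick consistency check that every started unmount reaches the detached stage, keyed on the quoted UniqueName field (a sketch against this log's line format):

    import re, sys

    started, detached = set(), set()
    uniq = re.compile(r'\(UniqueName: \\?"([^"\\]+)\\?"\)')   # tolerates the escaped quotes in the log

    for line in sys.stdin:
        m = uniq.search(line)
        if not m:
            continue
        if "UnmountVolume started" in line:
            started.add(m.group(1))
        elif "Volume detached" in line:
            detached.add(m.group(1))

    for vol in sorted(started - detached):
        print("unmount never completed:", vol)   # no output means a clean teardown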
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.648070 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-46g5v" event={"ID":"f511802d-cde5-4900-8a57-b06ebf1bab3b","Type":"ContainerDied","Data":"d263f7289fec8b5e70de8ecbc0695297714d6de11370fe85f62d1460310d479f"} Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.648118 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d263f7289fec8b5e70de8ecbc0695297714d6de11370fe85f62d1460310d479f" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.648213 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-46g5v" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.654647 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f511802d-cde5-4900-8a57-b06ebf1bab3b" (UID: "f511802d-cde5-4900-8a57-b06ebf1bab3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.684361 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d922v\" (UniqueName: \"kubernetes.io/projected/f511802d-cde5-4900-8a57-b06ebf1bab3b-kube-api-access-d922v\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.684390 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.684401 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.684414 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f511802d-cde5-4900-8a57-b06ebf1bab3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.689175 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerStarted","Data":"a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f"} Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.876587 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.876893 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-log" containerID="cri-o://c53da16978f0d2ee975b46a677e71f11af09afb226c2348398de3807af7e56ca" gracePeriod=30 Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.877045 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-api" containerID="cri-o://139ce157571dd497f3e623e750eb27e61eca7d2bb430d6c844ef92cdc1e94078" gracePeriod=30 Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.899204 4721 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.899535 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" containerName="nova-scheduler-scheduler" containerID="cri-o://82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b" gracePeriod=30 Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.922714 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.924166 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-log" containerID="cri-o://fe5d56a0a24da9a13580bd6abdb1c4d1c4e9407745375745a50708110fb24896" gracePeriod=30 Jan 30 21:53:00 crc kubenswrapper[4721]: I0130 21:53:00.924749 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-metadata" containerID="cri-o://61f14b05955e6aadc95309a876881121bb5f5d42039761e660b82bb840e4f843" gracePeriod=30 Jan 30 21:53:01 crc kubenswrapper[4721]: I0130 21:53:01.703282 4721 generic.go:334] "Generic (PLEG): container finished" podID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerID="c53da16978f0d2ee975b46a677e71f11af09afb226c2348398de3807af7e56ca" exitCode=143 Jan 30 21:53:01 crc kubenswrapper[4721]: I0130 21:53:01.703368 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1f26399-4d0d-4ec2-a777-21c0fff8d509","Type":"ContainerDied","Data":"c53da16978f0d2ee975b46a677e71f11af09afb226c2348398de3807af7e56ca"} Jan 30 21:53:01 crc kubenswrapper[4721]: I0130 21:53:01.708645 4721 generic.go:334] "Generic (PLEG): container finished" podID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerID="fe5d56a0a24da9a13580bd6abdb1c4d1c4e9407745375745a50708110fb24896" exitCode=143 Jan 30 21:53:01 crc kubenswrapper[4721]: I0130 21:53:01.708731 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d78ef6a-c771-40a8-b988-042f40e7d0e2","Type":"ContainerDied","Data":"fe5d56a0a24da9a13580bd6abdb1c4d1c4e9407745375745a50708110fb24896"} Jan 30 21:53:01 crc kubenswrapper[4721]: I0130 21:53:01.711470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerStarted","Data":"98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742"} Jan 30 21:53:01 crc kubenswrapper[4721]: I0130 21:53:01.745432 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.996100218 podStartE2EDuration="15.745409464s" podCreationTimestamp="2026-01-30 21:52:46 +0000 UTC" firstStartedPulling="2026-01-30 21:52:47.490383421 +0000 UTC m=+2156.282284687" lastFinishedPulling="2026-01-30 21:53:00.239692687 +0000 UTC m=+2169.031593933" observedRunningTime="2026-01-30 21:53:01.733866726 +0000 UTC m=+2170.525767972" watchObservedRunningTime="2026-01-30 21:53:01.745409464 +0000 UTC m=+2170.537310700" Jan 30 21:53:02 crc kubenswrapper[4721]: I0130 21:53:02.727748 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" 
event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerStarted","Data":"5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0"} Jan 30 21:53:02 crc kubenswrapper[4721]: I0130 21:53:02.728068 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 21:53:04 crc kubenswrapper[4721]: I0130 21:53:04.068552 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": read tcp 10.217.0.2:40300->10.217.0.224:8775: read: connection reset by peer" Jan 30 21:53:04 crc kubenswrapper[4721]: I0130 21:53:04.069427 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": read tcp 10.217.0.2:40288->10.217.0.224:8775: read: connection reset by peer" Jan 30 21:53:04 crc kubenswrapper[4721]: I0130 21:53:04.750407 4721 generic.go:334] "Generic (PLEG): container finished" podID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerID="61f14b05955e6aadc95309a876881121bb5f5d42039761e660b82bb840e4f843" exitCode=0 Jan 30 21:53:04 crc kubenswrapper[4721]: I0130 21:53:04.750461 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d78ef6a-c771-40a8-b988-042f40e7d0e2","Type":"ContainerDied","Data":"61f14b05955e6aadc95309a876881121bb5f5d42039761e660b82bb840e4f843"} Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.172337 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:53:05 crc kubenswrapper[4721]: E0130 21:53:05.211853 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b is running failed: container process not found" containerID="82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 21:53:05 crc kubenswrapper[4721]: E0130 21:53:05.212491 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b is running failed: container process not found" containerID="82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 21:53:05 crc kubenswrapper[4721]: E0130 21:53:05.212836 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b is running failed: container process not found" containerID="82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 30 21:53:05 crc kubenswrapper[4721]: E0130 21:53:05.212872 4721 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b is running failed: container process not found" probeType="Readiness" 
pod="openstack/nova-scheduler-0" podUID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" containerName="nova-scheduler-scheduler" Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.213880 4721 scope.go:117] "RemoveContainer" containerID="6c38b9a2de24bf31b2faa93142af1bb5824a774cb81865b546dc364e9583174a" Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.237694 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.289533 4721 scope.go:117] "RemoveContainer" containerID="9f6327d9740376f62d8ae92558a1df0c1debdc419b728c750b772a74a10efd5c" Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.370706 4721 scope.go:117] "RemoveContainer" containerID="5699b25289d43cae7b988fcd7bdc8676a38e2797b16d648f600dd6a3dc5d83ee" Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.771399 4721 generic.go:334] "Generic (PLEG): container finished" podID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" containerID="82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b" exitCode=0 Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.771620 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d870c1ca-99c0-4f06-93d1-299d2827d0fe","Type":"ContainerDied","Data":"82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b"} Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.791772 4721 generic.go:334] "Generic (PLEG): container finished" podID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerID="139ce157571dd497f3e623e750eb27e61eca7d2bb430d6c844ef92cdc1e94078" exitCode=0 Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.791848 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1f26399-4d0d-4ec2-a777-21c0fff8d509","Type":"ContainerDied","Data":"139ce157571dd497f3e623e750eb27e61eca7d2bb430d6c844ef92cdc1e94078"} Jan 30 21:53:05 crc kubenswrapper[4721]: I0130 21:53:05.987660 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.067691 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.136357 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-nova-metadata-tls-certs\") pod \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.136469 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-config-data\") pod \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.136501 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clbzn\" (UniqueName: \"kubernetes.io/projected/6d78ef6a-c771-40a8-b988-042f40e7d0e2-kube-api-access-clbzn\") pod \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.136536 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-combined-ca-bundle\") pod \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.136734 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d78ef6a-c771-40a8-b988-042f40e7d0e2-logs\") pod \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\" (UID: \"6d78ef6a-c771-40a8-b988-042f40e7d0e2\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.150108 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d78ef6a-c771-40a8-b988-042f40e7d0e2-logs" (OuterVolumeSpecName: "logs") pod "6d78ef6a-c771-40a8-b988-042f40e7d0e2" (UID: "6d78ef6a-c771-40a8-b988-042f40e7d0e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.169529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d78ef6a-c771-40a8-b988-042f40e7d0e2-kube-api-access-clbzn" (OuterVolumeSpecName: "kube-api-access-clbzn") pod "6d78ef6a-c771-40a8-b988-042f40e7d0e2" (UID: "6d78ef6a-c771-40a8-b988-042f40e7d0e2"). InnerVolumeSpecName "kube-api-access-clbzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.199535 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d78ef6a-c771-40a8-b988-042f40e7d0e2" (UID: "6d78ef6a-c771-40a8-b988-042f40e7d0e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.203887 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-config-data" (OuterVolumeSpecName: "config-data") pod "6d78ef6a-c771-40a8-b988-042f40e7d0e2" (UID: "6d78ef6a-c771-40a8-b988-042f40e7d0e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.232974 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6d78ef6a-c771-40a8-b988-042f40e7d0e2" (UID: "6d78ef6a-c771-40a8-b988-042f40e7d0e2"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.238742 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-combined-ca-bundle\") pod \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.238916 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-config-data\") pod \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.239013 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpl5n\" (UniqueName: \"kubernetes.io/projected/d870c1ca-99c0-4f06-93d1-299d2827d0fe-kube-api-access-jpl5n\") pod \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\" (UID: \"d870c1ca-99c0-4f06-93d1-299d2827d0fe\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.239915 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.239973 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clbzn\" (UniqueName: \"kubernetes.io/projected/6d78ef6a-c771-40a8-b988-042f40e7d0e2-kube-api-access-clbzn\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.239984 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.239992 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d78ef6a-c771-40a8-b988-042f40e7d0e2-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.240022 4721 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d78ef6a-c771-40a8-b988-042f40e7d0e2-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.246258 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d870c1ca-99c0-4f06-93d1-299d2827d0fe-kube-api-access-jpl5n" (OuterVolumeSpecName: "kube-api-access-jpl5n") pod "d870c1ca-99c0-4f06-93d1-299d2827d0fe" (UID: "d870c1ca-99c0-4f06-93d1-299d2827d0fe"). InnerVolumeSpecName "kube-api-access-jpl5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.273749 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-config-data" (OuterVolumeSpecName: "config-data") pod "d870c1ca-99c0-4f06-93d1-299d2827d0fe" (UID: "d870c1ca-99c0-4f06-93d1-299d2827d0fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.286494 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d870c1ca-99c0-4f06-93d1-299d2827d0fe" (UID: "d870c1ca-99c0-4f06-93d1-299d2827d0fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.342283 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.342410 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d870c1ca-99c0-4f06-93d1-299d2827d0fe-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.342424 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpl5n\" (UniqueName: \"kubernetes.io/projected/d870c1ca-99c0-4f06-93d1-299d2827d0fe-kube-api-access-jpl5n\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.406278 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.448854 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zztpc"] Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.545447 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-config-data\") pod \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.545545 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1f26399-4d0d-4ec2-a777-21c0fff8d509-logs\") pod \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.545731 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-combined-ca-bundle\") pod \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.545752 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-internal-tls-certs\") pod \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.545814 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ltgz\" (UniqueName: \"kubernetes.io/projected/b1f26399-4d0d-4ec2-a777-21c0fff8d509-kube-api-access-4ltgz\") pod \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.545866 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-public-tls-certs\") pod \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\" (UID: \"b1f26399-4d0d-4ec2-a777-21c0fff8d509\") " Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.546617 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1f26399-4d0d-4ec2-a777-21c0fff8d509-logs" (OuterVolumeSpecName: "logs") pod "b1f26399-4d0d-4ec2-a777-21c0fff8d509" (UID: "b1f26399-4d0d-4ec2-a777-21c0fff8d509"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.559536 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1f26399-4d0d-4ec2-a777-21c0fff8d509-kube-api-access-4ltgz" (OuterVolumeSpecName: "kube-api-access-4ltgz") pod "b1f26399-4d0d-4ec2-a777-21c0fff8d509" (UID: "b1f26399-4d0d-4ec2-a777-21c0fff8d509"). InnerVolumeSpecName "kube-api-access-4ltgz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.597620 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-config-data" (OuterVolumeSpecName: "config-data") pod "b1f26399-4d0d-4ec2-a777-21c0fff8d509" (UID: "b1f26399-4d0d-4ec2-a777-21c0fff8d509"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.621454 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1f26399-4d0d-4ec2-a777-21c0fff8d509" (UID: "b1f26399-4d0d-4ec2-a777-21c0fff8d509"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.631673 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b1f26399-4d0d-4ec2-a777-21c0fff8d509" (UID: "b1f26399-4d0d-4ec2-a777-21c0fff8d509"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.634047 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b1f26399-4d0d-4ec2-a777-21c0fff8d509" (UID: "b1f26399-4d0d-4ec2-a777-21c0fff8d509"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.648239 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.648272 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1f26399-4d0d-4ec2-a777-21c0fff8d509-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.648282 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.648294 4721 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.648303 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ltgz\" (UniqueName: \"kubernetes.io/projected/b1f26399-4d0d-4ec2-a777-21c0fff8d509-kube-api-access-4ltgz\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.648330 4721 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1f26399-4d0d-4ec2-a777-21c0fff8d509-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.804675 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.804703 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d870c1ca-99c0-4f06-93d1-299d2827d0fe","Type":"ContainerDied","Data":"0df9ff92ed099784ec930d736ce018d6cfd53dd1fca37bbfcd27385d8cc87eb6"} Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.804758 4721 scope.go:117] "RemoveContainer" containerID="82efdd6a021b56870e7ec67595181ce5e66ae54d776604893241d6e1892c8e4b" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.807139 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1f26399-4d0d-4ec2-a777-21c0fff8d509","Type":"ContainerDied","Data":"344a6fa36b2cb325b665f0bcfee0ebe79c86a5d19fa2a067110b29f3543aada5"} Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.807241 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.819316 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zztpc" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="registry-server" containerID="cri-o://c0ccd86017f4df4c05d74b1a46ee05a0b2a9953d13c20acc4da4a23d4365c104" gracePeriod=2 Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.819633 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.822715 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d78ef6a-c771-40a8-b988-042f40e7d0e2","Type":"ContainerDied","Data":"f5690b9354fdbfcb8e5ae82d8ee2d509333c16fa379f6652caf37ffc9ac80571"} Jan 30 21:53:06 crc kubenswrapper[4721]: I0130 21:53:06.829665 4721 scope.go:117] "RemoveContainer" containerID="139ce157571dd497f3e623e750eb27e61eca7d2bb430d6c844ef92cdc1e94078" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.016527 4721 scope.go:117] "RemoveContainer" containerID="c53da16978f0d2ee975b46a677e71f11af09afb226c2348398de3807af7e56ca" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.031873 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.044310 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.075401 4721 scope.go:117] "RemoveContainer" containerID="61f14b05955e6aadc95309a876881121bb5f5d42039761e660b82bb840e4f843" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.087126 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.121041 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.122595 4721 scope.go:117] "RemoveContainer" containerID="fe5d56a0a24da9a13580bd6abdb1c4d1c4e9407745375745a50708110fb24896" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.142737 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: E0130 21:53:07.145445 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" containerName="nova-scheduler-scheduler" Jan 30 21:53:07 crc 
kubenswrapper[4721]: I0130 21:53:07.145650 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" containerName="nova-scheduler-scheduler"
Jan 30 21:53:07 crc kubenswrapper[4721]: E0130 21:53:07.145679 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f511802d-cde5-4900-8a57-b06ebf1bab3b" containerName="nova-manage"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.145686 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f511802d-cde5-4900-8a57-b06ebf1bab3b" containerName="nova-manage"
Jan 30 21:53:07 crc kubenswrapper[4721]: E0130 21:53:07.145692 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-metadata"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.145698 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-metadata"
Jan 30 21:53:07 crc kubenswrapper[4721]: E0130 21:53:07.145888 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-log"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.145896 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-log"
Jan 30 21:53:07 crc kubenswrapper[4721]: E0130 21:53:07.145912 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-log"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.145920 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-log"
Jan 30 21:53:07 crc kubenswrapper[4721]: E0130 21:53:07.146126 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-api"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.146136 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-api"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.147150 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" containerName="nova-scheduler-scheduler"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.147419 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-log"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.147452 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" containerName="nova-api-api"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.147471 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-metadata"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.147880 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" containerName="nova-metadata-log"
Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.147902 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f511802d-cde5-4900-8a57-b06ebf1bab3b" containerName="nova-manage"
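The RemoveStaleState burst above is routine housekeeping, despite the E (error) severity on the cpu_manager lines: when the replacement nova pods are admitted, the CPU and memory managers reconcile their checkpointed per-container allocations against the current pod set and purge entries for containers that no longer exist, here the containers of the four pods just deleted. A sketch that lists exactly which (podUID, containerName) pairs were purged, reading this log on stdin:

    import re, sys

    # Covers both "RemoveStaleState: removing container" (cpu_manager) and
    # "RemoveStaleState removing state" (memory_manager) line shapes seen above.
    pat = re.compile(r'"RemoveStaleState[^"]*" podUID="([^"]+)" containerName="([^"]+)"')

    purged = set()
    for line in sys.stdin:
        m = pat.search(line)
        if m:
            purged.add(m.groups())

    for uid, ctr in sorted(purged):
        print(uid, ctr)   # six containers across the four deleted nova pods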
Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.153786 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.153911 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.155871 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.156336 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.164154 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.174547 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.179861 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.185964 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.188488 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.198708 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.210980 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.213157 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.216694 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.217665 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.223587 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.283712 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ssx6\" (UniqueName: \"kubernetes.io/projected/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-kube-api-access-4ssx6\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.283827 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-config-data\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.283883 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-logs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.283917 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.283944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-config-data\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.283986 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.284018 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz2v6\" (UniqueName: \"kubernetes.io/projected/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-kube-api-access-pz2v6\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.284072 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-internal-tls-certs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " 
pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.284093 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-public-tls-certs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.385825 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/766e6806-c4e1-4db9-9c4e-93a466d182f1-logs\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.385877 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-logs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.385907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wflm8\" (UniqueName: \"kubernetes.io/projected/766e6806-c4e1-4db9-9c4e-93a466d182f1-kube-api-access-wflm8\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.385930 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386019 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-config-data\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386107 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386194 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz2v6\" (UniqueName: \"kubernetes.io/projected/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-kube-api-access-pz2v6\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386344 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-internal-tls-certs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386370 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-logs\") pod \"nova-api-0\" (UID: 
\"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386373 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-public-tls-certs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386541 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ssx6\" (UniqueName: \"kubernetes.io/projected/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-kube-api-access-4ssx6\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386628 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386660 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386734 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-config-data\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.386763 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-config-data\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.390404 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-internal-tls-certs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.391409 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-config-data\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.391526 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.391602 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.391878 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-public-tls-certs\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.392094 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-config-data\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.405883 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz2v6\" (UniqueName: \"kubernetes.io/projected/03dc15ed-69f7-4a98-b586-a7e051ba2bbe-kube-api-access-pz2v6\") pod \"nova-api-0\" (UID: \"03dc15ed-69f7-4a98-b586-a7e051ba2bbe\") " pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.411961 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ssx6\" (UniqueName: \"kubernetes.io/projected/2e8d2389-e04d-4427-8e1e-ef0d8617a29f-kube-api-access-4ssx6\") pod \"nova-scheduler-0\" (UID: \"2e8d2389-e04d-4427-8e1e-ef0d8617a29f\") " pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.477529 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.488721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.488770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.488814 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-config-data\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.488847 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/766e6806-c4e1-4db9-9c4e-93a466d182f1-logs\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.488877 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wflm8\" (UniqueName: \"kubernetes.io/projected/766e6806-c4e1-4db9-9c4e-93a466d182f1-kube-api-access-wflm8\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " 
pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.489581 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/766e6806-c4e1-4db9-9c4e-93a466d182f1-logs\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.492998 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.493252 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-config-data\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.493956 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766e6806-c4e1-4db9-9c4e-93a466d182f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.503606 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.525008 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wflm8\" (UniqueName: \"kubernetes.io/projected/766e6806-c4e1-4db9-9c4e-93a466d182f1-kube-api-access-wflm8\") pod \"nova-metadata-0\" (UID: \"766e6806-c4e1-4db9-9c4e-93a466d182f1\") " pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.542414 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.839115 4721 generic.go:334] "Generic (PLEG): container finished" podID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerID="c0ccd86017f4df4c05d74b1a46ee05a0b2a9953d13c20acc4da4a23d4365c104" exitCode=0 Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.839165 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zztpc" event={"ID":"3b94fabe-19f8-485d-9b4d-592dd9f4e706","Type":"ContainerDied","Data":"c0ccd86017f4df4c05d74b1a46ee05a0b2a9953d13c20acc4da4a23d4365c104"} Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.845739 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerID="a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f" exitCode=0 Jan 30 21:53:07 crc kubenswrapper[4721]: I0130 21:53:07.845786 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerDied","Data":"a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.008169 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 30 21:53:08 crc kubenswrapper[4721]: W0130 21:53:08.009789 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03dc15ed_69f7_4a98_b586_a7e051ba2bbe.slice/crio-edb04c8f98afa2da5f7a1c6d1713c212d6a52bef9ce0e58d6f7e1182f2656026 WatchSource:0}: Error finding container edb04c8f98afa2da5f7a1c6d1713c212d6a52bef9ce0e58d6f7e1182f2656026: Status 404 returned error can't find the container with id edb04c8f98afa2da5f7a1c6d1713c212d6a52bef9ce0e58d6f7e1182f2656026 Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.124215 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d78ef6a-c771-40a8-b988-042f40e7d0e2" path="/var/lib/kubelet/pods/6d78ef6a-c771-40a8-b988-042f40e7d0e2/volumes" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.125037 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1f26399-4d0d-4ec2-a777-21c0fff8d509" path="/var/lib/kubelet/pods/b1f26399-4d0d-4ec2-a777-21c0fff8d509/volumes" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.125868 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d870c1ca-99c0-4f06-93d1-299d2827d0fe" path="/var/lib/kubelet/pods/d870c1ca-99c0-4f06-93d1-299d2827d0fe/volumes" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.131255 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:53:08 crc kubenswrapper[4721]: W0130 21:53:08.204895 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e8d2389_e04d_4427_8e1e_ef0d8617a29f.slice/crio-9c6e6f1ac6e8601f6b353304833dd21017894559d205262896f358c83e50f59e WatchSource:0}: Error finding container 9c6e6f1ac6e8601f6b353304833dd21017894559d205262896f358c83e50f59e: Status 404 returned error can't find the container with id 9c6e6f1ac6e8601f6b353304833dd21017894559d205262896f358c83e50f59e Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.209961 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-utilities\") pod \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.210025 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mxk6\" (UniqueName: \"kubernetes.io/projected/3b94fabe-19f8-485d-9b4d-592dd9f4e706-kube-api-access-5mxk6\") pod \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.210248 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-catalog-content\") pod \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\" (UID: \"3b94fabe-19f8-485d-9b4d-592dd9f4e706\") " Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.210848 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-utilities" (OuterVolumeSpecName: "utilities") pod "3b94fabe-19f8-485d-9b4d-592dd9f4e706" (UID: "3b94fabe-19f8-485d-9b4d-592dd9f4e706"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.213734 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.247459 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b94fabe-19f8-485d-9b4d-592dd9f4e706" (UID: "3b94fabe-19f8-485d-9b4d-592dd9f4e706"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.248038 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b94fabe-19f8-485d-9b4d-592dd9f4e706-kube-api-access-5mxk6" (OuterVolumeSpecName: "kube-api-access-5mxk6") pod "3b94fabe-19f8-485d-9b4d-592dd9f4e706" (UID: "3b94fabe-19f8-485d-9b4d-592dd9f4e706"). InnerVolumeSpecName "kube-api-access-5mxk6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.312492 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.312533 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mxk6\" (UniqueName: \"kubernetes.io/projected/3b94fabe-19f8-485d-9b4d-592dd9f4e706-kube-api-access-5mxk6\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.312546 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b94fabe-19f8-485d-9b4d-592dd9f4e706-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.384450 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.857880 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"766e6806-c4e1-4db9-9c4e-93a466d182f1","Type":"ContainerStarted","Data":"2a75a92f8cb4ffbc6662726430b0694944c6f507d5e7b95cb6049c7ead7fea9a"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.860316 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"03dc15ed-69f7-4a98-b586-a7e051ba2bbe","Type":"ContainerStarted","Data":"e991d3cedaeba39f651747f604d0074f5775f495f839ac1342e86c9331ced57e"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.860361 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"03dc15ed-69f7-4a98-b586-a7e051ba2bbe","Type":"ContainerStarted","Data":"edb04c8f98afa2da5f7a1c6d1713c212d6a52bef9ce0e58d6f7e1182f2656026"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.863436 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zztpc" event={"ID":"3b94fabe-19f8-485d-9b4d-592dd9f4e706","Type":"ContainerDied","Data":"50695f8b7f9a047282defcb5001b6ae5b7a50b80c7c998539723f928cda41cfa"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.863473 4721 scope.go:117] "RemoveContainer" containerID="c0ccd86017f4df4c05d74b1a46ee05a0b2a9953d13c20acc4da4a23d4365c104" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.863573 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zztpc" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.876507 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2e8d2389-e04d-4427-8e1e-ef0d8617a29f","Type":"ContainerStarted","Data":"29f67246fb89d7964f86529064674711c5515dcd84977bd9a191de417fb7a471"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.877416 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2e8d2389-e04d-4427-8e1e-ef0d8617a29f","Type":"ContainerStarted","Data":"9c6e6f1ac6e8601f6b353304833dd21017894559d205262896f358c83e50f59e"} Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.904231 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zztpc"] Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.914206 4721 scope.go:117] "RemoveContainer" containerID="02a489a281d178eab40a06dacd76eb7c2008f5ada292fc60bcb35d511637d4de" Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.915024 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zztpc"] Jan 30 21:53:08 crc kubenswrapper[4721]: I0130 21:53:08.945650 4721 scope.go:117] "RemoveContainer" containerID="b609aabcbf96056f02aef72f22c8ef8027cf9a245cdf447fb6b26703e02e4b5f" Jan 30 21:53:10 crc kubenswrapper[4721]: I0130 21:53:10.108885 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" path="/var/lib/kubelet/pods/3b94fabe-19f8-485d-9b4d-592dd9f4e706/volumes" Jan 30 21:53:10 crc kubenswrapper[4721]: I0130 21:53:10.898730 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"766e6806-c4e1-4db9-9c4e-93a466d182f1","Type":"ContainerStarted","Data":"241b6d40abc2467bde42f70dbe39b9069c51aa15f4b0b031dfe9facbb4feaa8c"} Jan 30 21:53:10 crc kubenswrapper[4721]: I0130 21:53:10.902103 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"03dc15ed-69f7-4a98-b586-a7e051ba2bbe","Type":"ContainerStarted","Data":"9cdb4ca67cfb44efceaaf541e9affa2ba2fa9313db83ce711c520dfce2a3bca3"} Jan 30 21:53:10 crc kubenswrapper[4721]: I0130 21:53:10.943680 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.943655138 podStartE2EDuration="3.943655138s" podCreationTimestamp="2026-01-30 21:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:53:10.923715069 +0000 UTC m=+2179.715616335" watchObservedRunningTime="2026-01-30 21:53:10.943655138 +0000 UTC m=+2179.735556404" Jan 30 21:53:10 crc kubenswrapper[4721]: I0130 21:53:10.958430 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.958408726 podStartE2EDuration="3.958408726s" podCreationTimestamp="2026-01-30 21:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:53:10.941738009 +0000 UTC m=+2179.733639265" watchObservedRunningTime="2026-01-30 21:53:10.958408726 +0000 UTC m=+2179.750309982" Jan 30 21:53:11 crc kubenswrapper[4721]: I0130 21:53:11.915476 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" 
event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerStarted","Data":"ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5"} Jan 30 21:53:11 crc kubenswrapper[4721]: I0130 21:53:11.919001 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"766e6806-c4e1-4db9-9c4e-93a466d182f1","Type":"ContainerStarted","Data":"fa0372264dfa7d5c08105fe0d0a1c7a9b85d0e661d787433e6dbd57bc0730087"} Jan 30 21:53:11 crc kubenswrapper[4721]: I0130 21:53:11.939583 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4n25s" podStartSLOduration=2.229548336 podStartE2EDuration="18.93955925s" podCreationTimestamp="2026-01-30 21:52:53 +0000 UTC" firstStartedPulling="2026-01-30 21:52:54.530500312 +0000 UTC m=+2163.322401558" lastFinishedPulling="2026-01-30 21:53:11.240511226 +0000 UTC m=+2180.032412472" observedRunningTime="2026-01-30 21:53:11.934470562 +0000 UTC m=+2180.726371838" watchObservedRunningTime="2026-01-30 21:53:11.93955925 +0000 UTC m=+2180.731460496" Jan 30 21:53:11 crc kubenswrapper[4721]: I0130 21:53:11.961619 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.961595923 podStartE2EDuration="4.961595923s" podCreationTimestamp="2026-01-30 21:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:53:11.952819632 +0000 UTC m=+2180.744720888" watchObservedRunningTime="2026-01-30 21:53:11.961595923 +0000 UTC m=+2180.753497169" Jan 30 21:53:12 crc kubenswrapper[4721]: I0130 21:53:12.504737 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 30 21:53:12 crc kubenswrapper[4721]: I0130 21:53:12.543103 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 21:53:12 crc kubenswrapper[4721]: I0130 21:53:12.543210 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 30 21:53:13 crc kubenswrapper[4721]: I0130 21:53:13.428928 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:53:13 crc kubenswrapper[4721]: I0130 21:53:13.430754 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:53:14 crc kubenswrapper[4721]: I0130 21:53:14.513354 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-4n25s" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" probeResult="failure" output=< Jan 30 21:53:14 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:53:14 crc kubenswrapper[4721]: > Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.477754 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.478394 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.505009 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.539208 4721 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.543324 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.543373 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 30 21:53:17 crc kubenswrapper[4721]: I0130 21:53:17.544012 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 30 21:53:18 crc kubenswrapper[4721]: I0130 21:53:18.011516 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 30 21:53:18 crc kubenswrapper[4721]: I0130 21:53:18.492461 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="03dc15ed-69f7-4a98-b586-a7e051ba2bbe" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.239:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:53:18 crc kubenswrapper[4721]: I0130 21:53:18.492486 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="03dc15ed-69f7-4a98-b586-a7e051ba2bbe" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.239:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:53:18 crc kubenswrapper[4721]: I0130 21:53:18.558578 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="766e6806-c4e1-4db9-9c4e-93a466d182f1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.241:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 30 21:53:18 crc kubenswrapper[4721]: I0130 21:53:18.558595 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="766e6806-c4e1-4db9-9c4e-93a466d182f1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.241:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 30 21:53:24 crc kubenswrapper[4721]: I0130 21:53:24.505187 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-4n25s" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" probeResult="failure" output=< Jan 30 21:53:24 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:53:24 crc kubenswrapper[4721]: > Jan 30 21:53:26 crc kubenswrapper[4721]: I0130 21:53:26.057932 4721 generic.go:334] "Generic (PLEG): container finished" podID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerID="5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0" exitCode=0 Jan 30 21:53:26 crc kubenswrapper[4721]: I0130 21:53:26.058084 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerDied","Data":"5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0"} Jan 30 21:53:27 crc kubenswrapper[4721]: I0130 21:53:27.485119 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 30 21:53:27 crc kubenswrapper[4721]: I0130 21:53:27.486049 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 30 21:53:27 
crc kubenswrapper[4721]: I0130 21:53:27.488035 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 30 21:53:27 crc kubenswrapper[4721]: I0130 21:53:27.493983 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 30 21:53:27 crc kubenswrapper[4721]: I0130 21:53:27.551921 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 21:53:27 crc kubenswrapper[4721]: I0130 21:53:27.555890 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 30 21:53:27 crc kubenswrapper[4721]: I0130 21:53:27.557204 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 30 21:53:28 crc kubenswrapper[4721]: I0130 21:53:28.081185 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerStarted","Data":"de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a"} Jan 30 21:53:28 crc kubenswrapper[4721]: I0130 21:53:28.082142 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 30 21:53:28 crc kubenswrapper[4721]: I0130 21:53:28.105408 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 30 21:53:28 crc kubenswrapper[4721]: I0130 21:53:28.105471 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 30 21:53:28 crc kubenswrapper[4721]: I0130 21:53:28.115350 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cb74w" podStartSLOduration=3.855413667 podStartE2EDuration="31.115323833s" podCreationTimestamp="2026-01-30 21:52:57 +0000 UTC" firstStartedPulling="2026-01-30 21:52:59.711804433 +0000 UTC m=+2168.503705669" lastFinishedPulling="2026-01-30 21:53:26.971714589 +0000 UTC m=+2195.763615835" observedRunningTime="2026-01-30 21:53:28.103431185 +0000 UTC m=+2196.895332451" watchObservedRunningTime="2026-01-30 21:53:28.115323833 +0000 UTC m=+2196.907225099" Jan 30 21:53:29 crc kubenswrapper[4721]: I0130 21:53:29.449179 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:53:29 crc kubenswrapper[4721]: I0130 21:53:29.449557 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:53:34 crc kubenswrapper[4721]: I0130 21:53:34.485372 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-4n25s" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" probeResult="failure" output=< Jan 30 21:53:34 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:53:34 crc kubenswrapper[4721]: > Jan 30 21:53:37 crc kubenswrapper[4721]: I0130 21:53:37.621849 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:53:37 crc kubenswrapper[4721]: I0130 21:53:37.622225 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.352945 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-b98rm"] Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.364782 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-b98rm"] Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.490911 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-wsdsc"] Jan 30 21:53:38 crc kubenswrapper[4721]: E0130 21:53:38.491406 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="registry-server" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.491423 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="registry-server" Jan 30 21:53:38 crc kubenswrapper[4721]: E0130 21:53:38.491456 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="extract-content" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.491465 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="extract-content" Jan 30 21:53:38 crc kubenswrapper[4721]: E0130 21:53:38.491493 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="extract-utilities" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.491500 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="extract-utilities" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.491716 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b94fabe-19f8-485d-9b4d-592dd9f4e706" containerName="registry-server" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.492605 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.495267 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.509547 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-wsdsc"] Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.534696 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-certs\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.534764 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfwws\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-kube-api-access-zfwws\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.534819 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-config-data\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.534891 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-scripts\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.534988 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-combined-ca-bundle\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.637652 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfwws\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-kube-api-access-zfwws\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.637712 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-config-data\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.637744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-scripts\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.637817 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-combined-ca-bundle\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.637950 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-certs\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.646906 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-certs\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.652365 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-config-data\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.652590 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-combined-ca-bundle\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.653663 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-scripts\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.662379 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfwws\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-kube-api-access-zfwws\") pod \"cloudkitty-db-sync-wsdsc\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.680317 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb74w" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" probeResult="failure" output=< Jan 30 21:53:38 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:53:38 crc kubenswrapper[4721]: > Jan 30 21:53:38 crc kubenswrapper[4721]: I0130 21:53:38.825814 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:39 crc kubenswrapper[4721]: I0130 21:53:39.375457 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-wsdsc"] Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.105288 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbe7c436-51b8-4985-933d-fde2a16767bd" path="/var/lib/kubelet/pods/fbe7c436-51b8-4985-933d-fde2a16767bd/volumes" Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.228581 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wsdsc" event={"ID":"580cb859-85bd-451b-a61b-1c1dfde44b17","Type":"ContainerStarted","Data":"a79a988e0b9dfae21bafce3c65d80d06438549eb293da70d2694d7f6a95a29df"} Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.228634 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wsdsc" event={"ID":"580cb859-85bd-451b-a61b-1c1dfde44b17","Type":"ContainerStarted","Data":"904471cfcff0c56eaea867a303164185ac92b7bf6c518462ef8b1c544b8cd4f5"} Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.263119 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-wsdsc" podStartSLOduration=1.8597302679999999 podStartE2EDuration="2.26310101s" podCreationTimestamp="2026-01-30 21:53:38 +0000 UTC" firstStartedPulling="2026-01-30 21:53:39.382575657 +0000 UTC m=+2208.174476903" lastFinishedPulling="2026-01-30 21:53:39.785946399 +0000 UTC m=+2208.577847645" observedRunningTime="2026-01-30 21:53:40.251662135 +0000 UTC m=+2209.043563381" watchObservedRunningTime="2026-01-30 21:53:40.26310101 +0000 UTC m=+2209.055002276" Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.287297 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.614983 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.615606 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-central-agent" containerID="cri-o://d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad" gracePeriod=30 Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.615960 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="proxy-httpd" containerID="cri-o://98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742" gracePeriod=30 Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.616024 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="sg-core" containerID="cri-o://0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67" gracePeriod=30 Jan 30 21:53:40 crc kubenswrapper[4721]: I0130 21:53:40.616114 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-notification-agent" containerID="cri-o://683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84" gracePeriod=30 Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.110701 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.241789 4721 generic.go:334] "Generic (PLEG): container finished" podID="391f567e-1d28-4e02-8490-799e10ed88c3" containerID="98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742" exitCode=0 Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.241821 4721 generic.go:334] "Generic (PLEG): container finished" podID="391f567e-1d28-4e02-8490-799e10ed88c3" containerID="0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67" exitCode=2 Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.241829 4721 generic.go:334] "Generic (PLEG): container finished" podID="391f567e-1d28-4e02-8490-799e10ed88c3" containerID="d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad" exitCode=0 Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.241901 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerDied","Data":"98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742"} Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.241976 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerDied","Data":"0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67"} Jan 30 21:53:41 crc kubenswrapper[4721]: I0130 21:53:41.241991 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerDied","Data":"d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad"} Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.053698 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.133368 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-scripts\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.133433 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-run-httpd\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.133568 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-ceilometer-tls-certs\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.133716 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-sg-core-conf-yaml\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.133776 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mthtg\" (UniqueName: \"kubernetes.io/projected/391f567e-1d28-4e02-8490-799e10ed88c3-kube-api-access-mthtg\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.133900 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-config-data\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.134026 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.134452 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-log-httpd\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.134826 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-combined-ca-bundle\") pod \"391f567e-1d28-4e02-8490-799e10ed88c3\" (UID: \"391f567e-1d28-4e02-8490-799e10ed88c3\") " Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.135270 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.138605 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.138641 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/391f567e-1d28-4e02-8490-799e10ed88c3-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.182486 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-scripts" (OuterVolumeSpecName: "scripts") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.195592 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/391f567e-1d28-4e02-8490-799e10ed88c3-kube-api-access-mthtg" (OuterVolumeSpecName: "kube-api-access-mthtg") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "kube-api-access-mthtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.214601 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.242902 4721 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.242940 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mthtg\" (UniqueName: \"kubernetes.io/projected/391f567e-1d28-4e02-8490-799e10ed88c3-kube-api-access-mthtg\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.242952 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.284575 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.288228 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.298075 4721 generic.go:334] "Generic (PLEG): container finished" podID="391f567e-1d28-4e02-8490-799e10ed88c3" containerID="683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84" exitCode=0 Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.298122 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerDied","Data":"683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84"} Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.298155 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"391f567e-1d28-4e02-8490-799e10ed88c3","Type":"ContainerDied","Data":"963717bc9854f3f4075a63cac27497e43a06339d9731557a5e1602fa64a9abdb"} Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.298171 4721 scope.go:117] "RemoveContainer" containerID="98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.298411 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.326581 4721 scope.go:117] "RemoveContainer" containerID="0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.327403 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-config-data" (OuterVolumeSpecName: "config-data") pod "391f567e-1d28-4e02-8490-799e10ed88c3" (UID: "391f567e-1d28-4e02-8490-799e10ed88c3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.345596 4721 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.345637 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.345650 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391f567e-1d28-4e02-8490-799e10ed88c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.358611 4721 scope.go:117] "RemoveContainer" containerID="683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.385349 4721 scope.go:117] "RemoveContainer" containerID="d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.411126 4721 scope.go:117] "RemoveContainer" containerID="98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.411766 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742\": container with ID starting with 98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742 not found: ID does not exist" containerID="98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.411828 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742"} err="failed to get container status \"98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742\": rpc error: code = NotFound desc = could not find container \"98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742\": container with ID starting with 98f3a9875880551b03f1579642efb14851a9aee575307b9a3d412da3ef74b742 not found: ID does not exist" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.411866 4721 scope.go:117] "RemoveContainer" containerID="0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.412266 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67\": container with ID starting with 0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67 not found: ID does not exist" containerID="0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.412333 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67"} err="failed to get container status \"0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67\": rpc error: code = NotFound desc = could not find container 
\"0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67\": container with ID starting with 0931f4664b58e89f687a5475b70cb49c674057f9ff01ea0a8ab7191942ca5c67 not found: ID does not exist" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.412365 4721 scope.go:117] "RemoveContainer" containerID="683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.412658 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84\": container with ID starting with 683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84 not found: ID does not exist" containerID="683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.412687 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84"} err="failed to get container status \"683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84\": rpc error: code = NotFound desc = could not find container \"683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84\": container with ID starting with 683bee82f978da4abd6c737d0336cdd8c15127ecd4cb8aacf1b7cd28036e5d84 not found: ID does not exist" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.412708 4721 scope.go:117] "RemoveContainer" containerID="d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.412986 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad\": container with ID starting with d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad not found: ID does not exist" containerID="d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.413031 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad"} err="failed to get container status \"d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad\": rpc error: code = NotFound desc = could not find container \"d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad\": container with ID starting with d0e408317729f3e5871105e58fe8b5835188e13abf38ecaf355b49c4598fa4ad not found: ID does not exist" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.504453 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.589137 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.661206 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.680033 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.690816 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 
21:53:43.691278 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="proxy-httpd" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691343 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="proxy-httpd" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.691372 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="sg-core" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691379 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="sg-core" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.691390 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-notification-agent" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691399 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-notification-agent" Jan 30 21:53:43 crc kubenswrapper[4721]: E0130 21:53:43.691422 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-central-agent" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691429 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-central-agent" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691615 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-notification-agent" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691627 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="ceilometer-central-agent" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691639 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="proxy-httpd" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.691659 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" containerName="sg-core" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.693877 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.697968 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.698388 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.698522 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.715690 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.753472 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-scripts\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.753529 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-config-data\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.753580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.753608 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-run-httpd\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.753681 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.753844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chrb4\" (UniqueName: \"kubernetes.io/projected/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-kube-api-access-chrb4\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.754172 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-log-httpd\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.754246 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-log-httpd\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856226 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856268 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-scripts\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856318 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-config-data\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856345 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856359 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-run-httpd\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856443 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856515 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chrb4\" (UniqueName: \"kubernetes.io/projected/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-kube-api-access-chrb4\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.856828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-log-httpd\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.857155 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-run-httpd\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.877183 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.881628 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.882754 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-scripts\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.882928 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-config-data\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.883195 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:43 crc kubenswrapper[4721]: I0130 21:53:43.885121 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chrb4\" (UniqueName: \"kubernetes.io/projected/c3d1003c-e848-4ff5-a27e-f1cff2e8162c-kube-api-access-chrb4\") pod \"ceilometer-0\" (UID: \"c3d1003c-e848-4ff5-a27e-f1cff2e8162c\") " pod="openstack/ceilometer-0" Jan 30 21:53:44 crc kubenswrapper[4721]: I0130 21:53:44.014243 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 30 21:53:44 crc kubenswrapper[4721]: I0130 21:53:44.128072 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="391f567e-1d28-4e02-8490-799e10ed88c3" path="/var/lib/kubelet/pods/391f567e-1d28-4e02-8490-799e10ed88c3/volumes" Jan 30 21:53:44 crc kubenswrapper[4721]: I0130 21:53:44.163479 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4n25s"] Jan 30 21:53:44 crc kubenswrapper[4721]: I0130 21:53:44.628781 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 30 21:53:45 crc kubenswrapper[4721]: I0130 21:53:45.345130 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4n25s" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" containerID="cri-o://ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5" gracePeriod=2 Jan 30 21:53:45 crc kubenswrapper[4721]: I0130 21:53:45.345932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c3d1003c-e848-4ff5-a27e-f1cff2e8162c","Type":"ContainerStarted","Data":"19999e391bb69f9917033803cc2dc9cafeddf72c537d1cd5872caaf4ee27b385"} Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.108033 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.216041 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" containerID="cri-o://13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d" gracePeriod=604795 Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.234734 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-utilities\") pod \"bcfba918-3cbe-431d-95e4-7ad611a264bd\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.234942 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-catalog-content\") pod \"bcfba918-3cbe-431d-95e4-7ad611a264bd\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.235008 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvxkg\" (UniqueName: \"kubernetes.io/projected/bcfba918-3cbe-431d-95e4-7ad611a264bd-kube-api-access-qvxkg\") pod \"bcfba918-3cbe-431d-95e4-7ad611a264bd\" (UID: \"bcfba918-3cbe-431d-95e4-7ad611a264bd\") " Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.248129 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcfba918-3cbe-431d-95e4-7ad611a264bd-kube-api-access-qvxkg" (OuterVolumeSpecName: "kube-api-access-qvxkg") pod "bcfba918-3cbe-431d-95e4-7ad611a264bd" (UID: "bcfba918-3cbe-431d-95e4-7ad611a264bd"). InnerVolumeSpecName "kube-api-access-qvxkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.261470 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-utilities" (OuterVolumeSpecName: "utilities") pod "bcfba918-3cbe-431d-95e4-7ad611a264bd" (UID: "bcfba918-3cbe-431d-95e4-7ad611a264bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.302950 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bcfba918-3cbe-431d-95e4-7ad611a264bd" (UID: "bcfba918-3cbe-431d-95e4-7ad611a264bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.337282 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.337628 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvxkg\" (UniqueName: \"kubernetes.io/projected/bcfba918-3cbe-431d-95e4-7ad611a264bd-kube-api-access-qvxkg\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.337642 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcfba918-3cbe-431d-95e4-7ad611a264bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.348491 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" containerID="cri-o://bf9936ef84188ef8278c633d64dab570e01d424f399826f7adde77d19219657f" gracePeriod=604795 Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.362283 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerID="ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5" exitCode=0 Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.362344 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerDied","Data":"ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5"} Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.362373 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4n25s" event={"ID":"bcfba918-3cbe-431d-95e4-7ad611a264bd","Type":"ContainerDied","Data":"5a1ef4e587b69b2559199561a1a2b42a188bf8c8aa1ac4c49a3777a86711325d"} Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.362390 4721 scope.go:117] "RemoveContainer" containerID="ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.362437 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4n25s" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.413697 4721 scope.go:117] "RemoveContainer" containerID="a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.418330 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4n25s"] Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.429490 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4n25s"] Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.443479 4721 scope.go:117] "RemoveContainer" containerID="1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.507926 4721 scope.go:117] "RemoveContainer" containerID="ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5" Jan 30 21:53:46 crc kubenswrapper[4721]: E0130 21:53:46.508446 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5\": container with ID starting with ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5 not found: ID does not exist" containerID="ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.508483 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5"} err="failed to get container status \"ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5\": rpc error: code = NotFound desc = could not find container \"ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5\": container with ID starting with ec79507715b459dd8622f77fd8874a25063351b73f997eeb405e204497b801e5 not found: ID does not exist" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.508507 4721 scope.go:117] "RemoveContainer" containerID="a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f" Jan 30 21:53:46 crc kubenswrapper[4721]: E0130 21:53:46.508818 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f\": container with ID starting with a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f not found: ID does not exist" containerID="a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.508874 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f"} err="failed to get container status \"a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f\": rpc error: code = NotFound desc = could not find container \"a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f\": container with ID starting with a37605cdd6d1ab65f8e41fa9600fad338a46cad8006e26d3da2d50fa20e28c7f not found: ID does not exist" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.508909 4721 scope.go:117] "RemoveContainer" containerID="1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186" Jan 30 21:53:46 crc kubenswrapper[4721]: E0130 21:53:46.509407 4721 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186\": container with ID starting with 1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186 not found: ID does not exist" containerID="1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.509438 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186"} err="failed to get container status \"1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186\": rpc error: code = NotFound desc = could not find container \"1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186\": container with ID starting with 1350d6bc59e7a281be3ed284624d09fb2c4de97a47b397505fad8ae5589d7186 not found: ID does not exist" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.644147 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.110:5671: connect: connection refused" Jan 30 21:53:46 crc kubenswrapper[4721]: I0130 21:53:46.951884 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.111:5671: connect: connection refused" Jan 30 21:53:47 crc kubenswrapper[4721]: I0130 21:53:47.402263 4721 generic.go:334] "Generic (PLEG): container finished" podID="580cb859-85bd-451b-a61b-1c1dfde44b17" containerID="a79a988e0b9dfae21bafce3c65d80d06438549eb293da70d2694d7f6a95a29df" exitCode=0 Jan 30 21:53:47 crc kubenswrapper[4721]: I0130 21:53:47.402410 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wsdsc" event={"ID":"580cb859-85bd-451b-a61b-1c1dfde44b17","Type":"ContainerDied","Data":"a79a988e0b9dfae21bafce3c65d80d06438549eb293da70d2694d7f6a95a29df"} Jan 30 21:53:48 crc kubenswrapper[4721]: I0130 21:53:48.106511 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" path="/var/lib/kubelet/pods/bcfba918-3cbe-431d-95e4-7ad611a264bd/volumes" Jan 30 21:53:48 crc kubenswrapper[4721]: I0130 21:53:48.687711 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb74w" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" probeResult="failure" output=< Jan 30 21:53:48 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:53:48 crc kubenswrapper[4721]: > Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.741822 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.817201 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-config-data\") pod \"580cb859-85bd-451b-a61b-1c1dfde44b17\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.817889 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-certs\") pod \"580cb859-85bd-451b-a61b-1c1dfde44b17\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.818632 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfwws\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-kube-api-access-zfwws\") pod \"580cb859-85bd-451b-a61b-1c1dfde44b17\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.818927 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-combined-ca-bundle\") pod \"580cb859-85bd-451b-a61b-1c1dfde44b17\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.819169 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-scripts\") pod \"580cb859-85bd-451b-a61b-1c1dfde44b17\" (UID: \"580cb859-85bd-451b-a61b-1c1dfde44b17\") " Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.825937 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-certs" (OuterVolumeSpecName: "certs") pod "580cb859-85bd-451b-a61b-1c1dfde44b17" (UID: "580cb859-85bd-451b-a61b-1c1dfde44b17"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.828755 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-scripts" (OuterVolumeSpecName: "scripts") pod "580cb859-85bd-451b-a61b-1c1dfde44b17" (UID: "580cb859-85bd-451b-a61b-1c1dfde44b17"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.832922 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-kube-api-access-zfwws" (OuterVolumeSpecName: "kube-api-access-zfwws") pod "580cb859-85bd-451b-a61b-1c1dfde44b17" (UID: "580cb859-85bd-451b-a61b-1c1dfde44b17"). InnerVolumeSpecName "kube-api-access-zfwws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.862691 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "580cb859-85bd-451b-a61b-1c1dfde44b17" (UID: "580cb859-85bd-451b-a61b-1c1dfde44b17"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.870750 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-config-data" (OuterVolumeSpecName: "config-data") pod "580cb859-85bd-451b-a61b-1c1dfde44b17" (UID: "580cb859-85bd-451b-a61b-1c1dfde44b17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.922656 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfwws\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-kube-api-access-zfwws\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.922701 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.922716 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.922726 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cb859-85bd-451b-a61b-1c1dfde44b17-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:49 crc kubenswrapper[4721]: I0130 21:53:49.922736 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/580cb859-85bd-451b-a61b-1c1dfde44b17-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.441503 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wsdsc" event={"ID":"580cb859-85bd-451b-a61b-1c1dfde44b17","Type":"ContainerDied","Data":"904471cfcff0c56eaea867a303164185ac92b7bf6c518462ef8b1c544b8cd4f5"} Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.441556 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="904471cfcff0c56eaea867a303164185ac92b7bf6c518462ef8b1c544b8cd4f5" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.441957 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-wsdsc" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.855513 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-qmlfm"] Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.864736 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-qmlfm"] Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.933570 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-ttrpx"] Jan 30 21:53:50 crc kubenswrapper[4721]: E0130 21:53:50.934041 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="extract-content" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.934058 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="extract-content" Jan 30 21:53:50 crc kubenswrapper[4721]: E0130 21:53:50.934095 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.934102 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" Jan 30 21:53:50 crc kubenswrapper[4721]: E0130 21:53:50.934114 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="extract-utilities" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.934121 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="extract-utilities" Jan 30 21:53:50 crc kubenswrapper[4721]: E0130 21:53:50.934134 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="580cb859-85bd-451b-a61b-1c1dfde44b17" containerName="cloudkitty-db-sync" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.934140 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="580cb859-85bd-451b-a61b-1c1dfde44b17" containerName="cloudkitty-db-sync" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.934502 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="580cb859-85bd-451b-a61b-1c1dfde44b17" containerName="cloudkitty-db-sync" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.934529 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcfba918-3cbe-431d-95e4-7ad611a264bd" containerName="registry-server" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.935253 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.945198 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 30 21:53:50 crc kubenswrapper[4721]: I0130 21:53:50.948707 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-ttrpx"] Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.044990 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-scripts\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.045367 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-config-data\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.045410 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-combined-ca-bundle\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.045525 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-certs\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.045644 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlkfg\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-kube-api-access-zlkfg\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.165176 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-scripts\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.165277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-config-data\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.165307 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-combined-ca-bundle\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 
30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.165585 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-certs\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.165910 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlkfg\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-kube-api-access-zlkfg\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.192045 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-scripts\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.194033 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-certs\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.207318 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-config-data\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.219051 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlkfg\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-kube-api-access-zlkfg\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.232082 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-combined-ca-bundle\") pod \"cloudkitty-storageinit-ttrpx\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.253478 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:51 crc kubenswrapper[4721]: W0130 21:53:51.725500 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod387eb724_8919_4a46_919a_083c3b427d3e.slice/crio-79ce46b6e50f4cf9285773ca2232da442e965abf1537f5b84eaf161ac03d0b23 WatchSource:0}: Error finding container 79ce46b6e50f4cf9285773ca2232da442e965abf1537f5b84eaf161ac03d0b23: Status 404 returned error can't find the container with id 79ce46b6e50f4cf9285773ca2232da442e965abf1537f5b84eaf161ac03d0b23 Jan 30 21:53:51 crc kubenswrapper[4721]: I0130 21:53:51.726315 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-ttrpx"] Jan 30 21:53:52 crc kubenswrapper[4721]: I0130 21:53:52.110410 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="453b837a-3b2e-4993-90e4-ea72ad1f6b9e" path="/var/lib/kubelet/pods/453b837a-3b2e-4993-90e4-ea72ad1f6b9e/volumes" Jan 30 21:53:52 crc kubenswrapper[4721]: I0130 21:53:52.466561 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-ttrpx" event={"ID":"387eb724-8919-4a46-919a-083c3b427d3e","Type":"ContainerStarted","Data":"bab0201974485fa337d3a9a065d623c27f5cca7996174c0741468d30fbb998aa"} Jan 30 21:53:52 crc kubenswrapper[4721]: I0130 21:53:52.467026 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-ttrpx" event={"ID":"387eb724-8919-4a46-919a-083c3b427d3e","Type":"ContainerStarted","Data":"79ce46b6e50f4cf9285773ca2232da442e965abf1537f5b84eaf161ac03d0b23"} Jan 30 21:53:52 crc kubenswrapper[4721]: I0130 21:53:52.470417 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c3d1003c-e848-4ff5-a27e-f1cff2e8162c","Type":"ContainerStarted","Data":"c7742a919ba084a42e9d37a1585d9ea58cc5f6ce4aaf7bae393860b74e8aeccc"} Jan 30 21:53:52 crc kubenswrapper[4721]: I0130 21:53:52.496943 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-ttrpx" podStartSLOduration=2.496914506 podStartE2EDuration="2.496914506s" podCreationTimestamp="2026-01-30 21:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:53:52.483072547 +0000 UTC m=+2221.274973803" watchObservedRunningTime="2026-01-30 21:53:52.496914506 +0000 UTC m=+2221.288815752" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.221693 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.355128 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-server-conf\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.355199 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-plugins\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.355263 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-config-data\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.355333 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b667j\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-kube-api-access-b667j\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358274 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358371 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-plugins-conf\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358470 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-erlang-cookie\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358489 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d483e4e0-6513-44ce-b601-359b9c2262ca-pod-info\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358548 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-tls\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358566 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-confd\") pod 
\"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.358593 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d483e4e0-6513-44ce-b601-359b9c2262ca-erlang-cookie-secret\") pod \"d483e4e0-6513-44ce-b601-359b9c2262ca\" (UID: \"d483e4e0-6513-44ce-b601-359b9c2262ca\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.362194 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.368761 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-kube-api-access-b667j" (OuterVolumeSpecName: "kube-api-access-b667j") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "kube-api-access-b667j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.373029 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.374510 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.405470 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.409405 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d483e4e0-6513-44ce-b601-359b9c2262ca-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.427945 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d483e4e0-6513-44ce-b601-359b9c2262ca-pod-info" (OuterVolumeSpecName: "pod-info") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.430748 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6" (OuterVolumeSpecName: "persistence") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463819 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463872 4721 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d483e4e0-6513-44ce-b601-359b9c2262ca-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463883 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463894 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b667j\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-kube-api-access-b667j\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463921 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") on node \"crc\" " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463930 4721 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463958 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.463966 4721 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d483e4e0-6513-44ce-b601-359b9c2262ca-pod-info\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.501645 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-config-data" (OuterVolumeSpecName: "config-data") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.502037 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.502945 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6") on node "crc" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.504026 4721 generic.go:334] "Generic (PLEG): container finished" podID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerID="13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d" exitCode=0 Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.504085 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d483e4e0-6513-44ce-b601-359b9c2262ca","Type":"ContainerDied","Data":"13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d"} Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.504115 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d483e4e0-6513-44ce-b601-359b9c2262ca","Type":"ContainerDied","Data":"7c35c52bf45495b6d32fad9ef0dd875bb2a6146d773de2577a158d4bdf5faab5"} Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.504136 4721 scope.go:117] "RemoveContainer" containerID="13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.504287 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.527192 4721 generic.go:334] "Generic (PLEG): container finished" podID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerID="bf9936ef84188ef8278c633d64dab570e01d424f399826f7adde77d19219657f" exitCode=0 Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.527369 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1f120802-4119-4ed8-bf74-62b1e4a534bc","Type":"ContainerDied","Data":"bf9936ef84188ef8278c633d64dab570e01d424f399826f7adde77d19219657f"} Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.536136 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-server-conf" (OuterVolumeSpecName: "server-conf") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.550628 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c3d1003c-e848-4ff5-a27e-f1cff2e8162c","Type":"ContainerStarted","Data":"90cec4675d859a30b316e6f996aa7691f4685549aef4e6cc81ac716ab6353281"} Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.565291 4721 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-server-conf\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.565353 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d483e4e0-6513-44ce-b601-359b9c2262ca-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.565366 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.648839 4721 scope.go:117] "RemoveContainer" containerID="7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.698115 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d483e4e0-6513-44ce-b601-359b9c2262ca" (UID: "d483e4e0-6513-44ce-b601-359b9c2262ca"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.753070 4721 scope.go:117] "RemoveContainer" containerID="13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d" Jan 30 21:53:53 crc kubenswrapper[4721]: E0130 21:53:53.754207 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d\": container with ID starting with 13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d not found: ID does not exist" containerID="13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.754241 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d"} err="failed to get container status \"13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d\": rpc error: code = NotFound desc = could not find container \"13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d\": container with ID starting with 13b906c053eb5aadc3bb53b9fb302fe156e0c40547b0aef707d9b1178ad5cb9d not found: ID does not exist" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.754263 4721 scope.go:117] "RemoveContainer" containerID="7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec" Jan 30 21:53:53 crc kubenswrapper[4721]: E0130 21:53:53.754755 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec\": container with ID starting with 
7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec not found: ID does not exist" containerID="7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.754780 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec"} err="failed to get container status \"7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec\": rpc error: code = NotFound desc = could not find container \"7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec\": container with ID starting with 7de4241d48557a701b7d8225095db0dd900803f04c7c888eb71f5b38291b7fec not found: ID does not exist" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.770485 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d483e4e0-6513-44ce-b601-359b9c2262ca-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.780397 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.899677 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.899880 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-plugins\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.899989 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-erlang-cookie\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900050 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1f120802-4119-4ed8-bf74-62b1e4a534bc-pod-info\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900092 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2dgp\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-kube-api-access-k2dgp\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900130 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-server-conf\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900176 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-confd\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900228 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-config-data\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900287 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-plugins-conf\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900358 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-tls\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.900426 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1f120802-4119-4ed8-bf74-62b1e4a534bc-erlang-cookie-secret\") pod \"1f120802-4119-4ed8-bf74-62b1e4a534bc\" (UID: \"1f120802-4119-4ed8-bf74-62b1e4a534bc\") " Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.916086 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:53:53 crc kubenswrapper[4721]: I0130 21:53:53.932398 4721 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.024795 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.025039 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.034929 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.035774 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1f120802-4119-4ed8-bf74-62b1e4a534bc-pod-info" (OuterVolumeSpecName: "pod-info") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.035955 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f120802-4119-4ed8-bf74-62b1e4a534bc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.036999 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.038355 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-config-data" (OuterVolumeSpecName: "config-data") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.038375 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.045659 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-kube-api-access-k2dgp" (OuterVolumeSpecName: "kube-api-access-k2dgp") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "kube-api-access-k2dgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.045827 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1" (OuterVolumeSpecName: "persistence") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "pvc-d87170ce-4cf3-4720-91cc-969ab48490b1". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.061326 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-server-conf" (OuterVolumeSpecName: "server-conf") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.083613 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.116403 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" path="/var/lib/kubelet/pods/d483e4e0-6513-44ce-b601-359b9c2262ca/volumes" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.137838 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") on node \"crc\" " Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.137977 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.138054 4721 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1f120802-4119-4ed8-bf74-62b1e4a534bc-pod-info\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.138116 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2dgp\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-kube-api-access-k2dgp\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.138253 4721 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-server-conf\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.138351 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f120802-4119-4ed8-bf74-62b1e4a534bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.138430 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.138482 4721 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1f120802-4119-4ed8-bf74-62b1e4a534bc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.159543 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1f120802-4119-4ed8-bf74-62b1e4a534bc" (UID: "1f120802-4119-4ed8-bf74-62b1e4a534bc"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.225288 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: E0130 21:53:54.225855 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="setup-container" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.225873 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="setup-container" Jan 30 21:53:54 crc kubenswrapper[4721]: E0130 21:53:54.225899 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="setup-container" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.225907 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="setup-container" Jan 30 21:53:54 crc kubenswrapper[4721]: E0130 21:53:54.225922 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.225930 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" Jan 30 21:53:54 crc kubenswrapper[4721]: E0130 21:53:54.225957 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.225965 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.226270 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d483e4e0-6513-44ce-b601-359b9c2262ca" containerName="rabbitmq" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.226335 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" containerName="rabbitmq" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.227748 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.227851 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.229459 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.229707 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-d87170ce-4cf3-4720-91cc-969ab48490b1" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1") on node "crc" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.231775 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.232126 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.232331 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.232498 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-bss2v" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.232519 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.232670 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.240823 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.241266 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1f120802-4119-4ed8-bf74-62b1e4a534bc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.241287 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.342905 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmzmg\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-kube-api-access-mmzmg\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343247 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343328 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343368 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: 
\"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343526 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343613 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343767 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343843 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343897 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-config-data\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.343956 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.344201 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447533 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447628 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 
crc kubenswrapper[4721]: I0130 21:53:54.447664 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447724 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447799 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-config-data\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447837 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447948 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.447998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmzmg\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-kube-api-access-mmzmg\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.448032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.448093 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.448410 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.448935 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.449359 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-config-data\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.451880 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.452504 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.456183 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.458024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.461152 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.484022 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.485581 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.485612 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d2e49a257cea4677cd616c88fc5a81899852b75682faa476306530b19a572e1c/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.494741 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmzmg\" (UniqueName: \"kubernetes.io/projected/7ccec6ec-8034-4a0f-88a6-b86751e0f22b-kube-api-access-mmzmg\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.621066 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1f120802-4119-4ed8-bf74-62b1e4a534bc","Type":"ContainerDied","Data":"73145151fc6d97c9900c2ca67909d64e973403bcb60449a9ef8af3d302dbddce"} Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.621132 4721 scope.go:117] "RemoveContainer" containerID="bf9936ef84188ef8278c633d64dab570e01d424f399826f7adde77d19219657f" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.621365 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.623248 4721 generic.go:334] "Generic (PLEG): container finished" podID="387eb724-8919-4a46-919a-083c3b427d3e" containerID="bab0201974485fa337d3a9a065d623c27f5cca7996174c0741468d30fbb998aa" exitCode=0 Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.623324 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-ttrpx" event={"ID":"387eb724-8919-4a46-919a-083c3b427d3e","Type":"ContainerDied","Data":"bab0201974485fa337d3a9a065d623c27f5cca7996174c0741468d30fbb998aa"} Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.626431 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c3d1003c-e848-4ff5-a27e-f1cff2e8162c","Type":"ContainerStarted","Data":"1258cc2c5f033f340d765b170682cc4045ac038191b692d4548e2c99056fa0c5"} Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.681143 4721 scope.go:117] "RemoveContainer" containerID="c0b0b5623f5f35b50de663a7d761c3d99abef05cd90b85da9b628b2b5a7c2233" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.733742 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.735541 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de640d1-09f8-4ca0-9106-cb63d84677d6\") pod \"rabbitmq-server-0\" (UID: \"7ccec6ec-8034-4a0f-88a6-b86751e0f22b\") " pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.755439 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.787390 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 
21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.790318 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.800930 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.801241 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.801822 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.802070 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-5sdtt" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.802267 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.802633 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.802872 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.838202 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.847998 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.856714 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/12f1cce2-7b07-4519-b1c3-15e57ed44cde-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.856784 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.856832 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.856877 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.856932 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.857208 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.857263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.857308 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.857341 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/12f1cce2-7b07-4519-b1c3-15e57ed44cde-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.857391 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.857440 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56bz5\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-kube-api-access-56bz5\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: E0130 21:53:54.861715 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f120802_4119_4ed8_bf74_62b1e4a534bc.slice/crio-73145151fc6d97c9900c2ca67909d64e973403bcb60449a9ef8af3d302dbddce\": RecentStats: unable to find data in memory cache]" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.959578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.959937 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.959964 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960010 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960033 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960058 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/12f1cce2-7b07-4519-b1c3-15e57ed44cde-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960138 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960227 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56bz5\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-kube-api-access-56bz5\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960327 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/12f1cce2-7b07-4519-b1c3-15e57ed44cde-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960407 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.960455 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.961203 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.962187 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.962567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.963323 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.964997 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.965033 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/825a5bf72240810fee23f32c55feb81894d03f5689f753e1fa13da9c43ef0714/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.966884 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/12f1cce2-7b07-4519-b1c3-15e57ed44cde-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.968026 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/12f1cce2-7b07-4519-b1c3-15e57ed44cde-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.968103 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/12f1cce2-7b07-4519-b1c3-15e57ed44cde-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.970018 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.970115 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:54 crc kubenswrapper[4721]: I0130 21:53:54.994109 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56bz5\" (UniqueName: \"kubernetes.io/projected/12f1cce2-7b07-4519-b1c3-15e57ed44cde-kube-api-access-56bz5\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:55 crc kubenswrapper[4721]: I0130 21:53:55.027738 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d87170ce-4cf3-4720-91cc-969ab48490b1\") pod \"rabbitmq-cell1-server-0\" (UID: \"12f1cce2-7b07-4519-b1c3-15e57ed44cde\") " pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:55 crc kubenswrapper[4721]: I0130 21:53:55.120541 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:53:55 crc kubenswrapper[4721]: I0130 21:53:55.370728 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 30 21:53:55 crc kubenswrapper[4721]: I0130 21:53:55.643264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ccec6ec-8034-4a0f-88a6-b86751e0f22b","Type":"ContainerStarted","Data":"afeb313495cb3f08c2cf978e8bf7e8f64ea6963218416f94203a0892962cdb73"} Jan 30 21:53:55 crc kubenswrapper[4721]: I0130 21:53:55.830544 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.106091 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f120802-4119-4ed8-bf74-62b1e4a534bc" path="/var/lib/kubelet/pods/1f120802-4119-4ed8-bf74-62b1e4a534bc/volumes" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.122586 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.300106 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-config-data\") pod \"387eb724-8919-4a46-919a-083c3b427d3e\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.300188 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-combined-ca-bundle\") pod \"387eb724-8919-4a46-919a-083c3b427d3e\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.300354 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-certs\") pod \"387eb724-8919-4a46-919a-083c3b427d3e\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.300384 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-scripts\") pod \"387eb724-8919-4a46-919a-083c3b427d3e\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.301007 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlkfg\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-kube-api-access-zlkfg\") pod \"387eb724-8919-4a46-919a-083c3b427d3e\" (UID: \"387eb724-8919-4a46-919a-083c3b427d3e\") " Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.414007 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-kube-api-access-zlkfg" (OuterVolumeSpecName: "kube-api-access-zlkfg") pod "387eb724-8919-4a46-919a-083c3b427d3e" (UID: "387eb724-8919-4a46-919a-083c3b427d3e"). InnerVolumeSpecName "kube-api-access-zlkfg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.414610 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-scripts" (OuterVolumeSpecName: "scripts") pod "387eb724-8919-4a46-919a-083c3b427d3e" (UID: "387eb724-8919-4a46-919a-083c3b427d3e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.415086 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-certs" (OuterVolumeSpecName: "certs") pod "387eb724-8919-4a46-919a-083c3b427d3e" (UID: "387eb724-8919-4a46-919a-083c3b427d3e"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.505327 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.505360 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.505373 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlkfg\" (UniqueName: \"kubernetes.io/projected/387eb724-8919-4a46-919a-083c3b427d3e-kube-api-access-zlkfg\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.541099 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "387eb724-8919-4a46-919a-083c3b427d3e" (UID: "387eb724-8919-4a46-919a-083c3b427d3e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.607039 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.655018 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-ttrpx" event={"ID":"387eb724-8919-4a46-919a-083c3b427d3e","Type":"ContainerDied","Data":"79ce46b6e50f4cf9285773ca2232da442e965abf1537f5b84eaf161ac03d0b23"} Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.655064 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-ttrpx" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.655055 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79ce46b6e50f4cf9285773ca2232da442e965abf1537f5b84eaf161ac03d0b23" Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.655891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"12f1cce2-7b07-4519-b1c3-15e57ed44cde","Type":"ContainerStarted","Data":"e2f9265d2c0b4aea7f889dc88b2c02f69f569be863472c5b9ee7a7b33c500007"} Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.767742 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.768002 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="70ce3206-6193-4c77-93bf-9fdb366a9a7d" containerName="cloudkitty-proc" containerID="cri-o://0a8dbbc7fcb51d39b000928f1057573deb327ac4c85033233a13392666c3077d" gracePeriod=30 Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.783622 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.783917 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api-log" containerID="cri-o://a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6" gracePeriod=30 Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.784059 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api" containerID="cri-o://e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36" gracePeriod=30 Jan 30 21:53:56 crc kubenswrapper[4721]: I0130 21:53:56.947879 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-config-data" (OuterVolumeSpecName: "config-data") pod "387eb724-8919-4a46-919a-083c3b427d3e" (UID: "387eb724-8919-4a46-919a-083c3b427d3e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:57 crc kubenswrapper[4721]: I0130 21:53:57.014066 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387eb724-8919-4a46-919a-083c3b427d3e-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:57 crc kubenswrapper[4721]: I0130 21:53:57.671330 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ccec6ec-8034-4a0f-88a6-b86751e0f22b","Type":"ContainerStarted","Data":"27107000ff079ca66fd926f83803bee451e0b0ee0ce932d3388a1f3b577fe041"} Jan 30 21:53:57 crc kubenswrapper[4721]: I0130 21:53:57.675485 4721 generic.go:334] "Generic (PLEG): container finished" podID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerID="a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6" exitCode=143 Jan 30 21:53:57 crc kubenswrapper[4721]: I0130 21:53:57.675525 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"ffd01d7d-385c-4b0d-bd25-291e63104c09","Type":"ContainerDied","Data":"a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6"} Jan 30 21:53:58 crc kubenswrapper[4721]: I0130 21:53:58.692006 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb74w" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" probeResult="failure" output=< Jan 30 21:53:58 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:53:58 crc kubenswrapper[4721]: > Jan 30 21:53:58 crc kubenswrapper[4721]: I0130 21:53:58.713824 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"12f1cce2-7b07-4519-b1c3-15e57ed44cde","Type":"ContainerStarted","Data":"c6508fc71e147624bdf5581c6a81dfde9d0cd99d3db9cbf4899971034704eff9"} Jan 30 21:53:58 crc kubenswrapper[4721]: I0130 21:53:58.720317 4721 generic.go:334] "Generic (PLEG): container finished" podID="70ce3206-6193-4c77-93bf-9fdb366a9a7d" containerID="0a8dbbc7fcb51d39b000928f1057573deb327ac4c85033233a13392666c3077d" exitCode=0 Jan 30 21:53:58 crc kubenswrapper[4721]: I0130 21:53:58.720341 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"70ce3206-6193-4c77-93bf-9fdb366a9a7d","Type":"ContainerDied","Data":"0a8dbbc7fcb51d39b000928f1057573deb327ac4c85033233a13392666c3077d"} Jan 30 21:53:58 crc kubenswrapper[4721]: I0130 21:53:58.750374 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-api-0" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.196:8889/healthcheck\": dial tcp 10.217.0.196:8889: connect: connection refused" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.034887 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.110356 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-q92m8"] Jan 30 21:53:59 crc kubenswrapper[4721]: E0130 21:53:59.111411 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="387eb724-8919-4a46-919a-083c3b427d3e" containerName="cloudkitty-storageinit" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.111484 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="387eb724-8919-4a46-919a-083c3b427d3e" containerName="cloudkitty-storageinit" Jan 30 21:53:59 crc kubenswrapper[4721]: E0130 21:53:59.111555 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ce3206-6193-4c77-93bf-9fdb366a9a7d" containerName="cloudkitty-proc" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.111607 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ce3206-6193-4c77-93bf-9fdb366a9a7d" containerName="cloudkitty-proc" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.111895 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="387eb724-8919-4a46-919a-083c3b427d3e" containerName="cloudkitty-storageinit" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.112064 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="70ce3206-6193-4c77-93bf-9fdb366a9a7d" containerName="cloudkitty-proc" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.113265 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.118606 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.125969 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-q92m8"] Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.199215 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-combined-ca-bundle\") pod \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.199455 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvkgd\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-kube-api-access-pvkgd\") pod \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.199495 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-scripts\") pod \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.199611 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data\") pod \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.199631 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-certs\") pod \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.199742 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data-custom\") pod \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\" (UID: \"70ce3206-6193-4c77-93bf-9fdb366a9a7d\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.203825 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-scripts" (OuterVolumeSpecName: "scripts") pod "70ce3206-6193-4c77-93bf-9fdb366a9a7d" (UID: "70ce3206-6193-4c77-93bf-9fdb366a9a7d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.204113 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-certs" (OuterVolumeSpecName: "certs") pod "70ce3206-6193-4c77-93bf-9fdb366a9a7d" (UID: "70ce3206-6193-4c77-93bf-9fdb366a9a7d"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.208671 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-kube-api-access-pvkgd" (OuterVolumeSpecName: "kube-api-access-pvkgd") pod "70ce3206-6193-4c77-93bf-9fdb366a9a7d" (UID: "70ce3206-6193-4c77-93bf-9fdb366a9a7d"). InnerVolumeSpecName "kube-api-access-pvkgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.211418 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "70ce3206-6193-4c77-93bf-9fdb366a9a7d" (UID: "70ce3206-6193-4c77-93bf-9fdb366a9a7d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.241138 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data" (OuterVolumeSpecName: "config-data") pod "70ce3206-6193-4c77-93bf-9fdb366a9a7d" (UID: "70ce3206-6193-4c77-93bf-9fdb366a9a7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.244562 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70ce3206-6193-4c77-93bf-9fdb366a9a7d" (UID: "70ce3206-6193-4c77-93bf-9fdb366a9a7d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.301509 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-config\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.301569 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.302570 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.302645 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.302720 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml74l\" (UniqueName: \"kubernetes.io/projected/8ec61729-73e8-4bc5-bc88-db2944c40ff9-kube-api-access-ml74l\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.302795 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.302851 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.302993 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvkgd\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-kube-api-access-pvkgd\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.303021 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.303036 4721 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.303048 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/70ce3206-6193-4c77-93bf-9fdb366a9a7d-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.303058 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.303070 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce3206-6193-4c77-93bf-9fdb366a9a7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.405615 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.405724 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml74l\" (UniqueName: \"kubernetes.io/projected/8ec61729-73e8-4bc5-bc88-db2944c40ff9-kube-api-access-ml74l\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.405985 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.406037 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.406182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-config\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.406218 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.406252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" 
(UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.406858 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.407455 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.407663 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.408166 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-config\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.408754 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.409248 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.432565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml74l\" (UniqueName: \"kubernetes.io/projected/8ec61729-73e8-4bc5-bc88-db2944c40ff9-kube-api-access-ml74l\") pod \"dnsmasq-dns-dbb88bf8c-q92m8\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.446481 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.448897 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.448945 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.448987 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.449674 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.449741 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" gracePeriod=600 Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.801630 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.810746 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.811047 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"70ce3206-6193-4c77-93bf-9fdb366a9a7d","Type":"ContainerDied","Data":"1cd9ca499a3f23b2fc6cf5154f34d200e9a9ca56e1e8881fa6384b98ad866981"} Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.811108 4721 scope.go:117] "RemoveContainer" containerID="0a8dbbc7fcb51d39b000928f1057573deb327ac4c85033233a13392666c3077d" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829174 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-scripts\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829234 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829344 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-public-tls-certs\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829418 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data-custom\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829455 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-certs\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829478 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-internal-tls-certs\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829510 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-combined-ca-bundle\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829535 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xw6r\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-kube-api-access-7xw6r\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.829607 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ffd01d7d-385c-4b0d-bd25-291e63104c09-logs\") pod \"ffd01d7d-385c-4b0d-bd25-291e63104c09\" (UID: \"ffd01d7d-385c-4b0d-bd25-291e63104c09\") " Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.830370 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffd01d7d-385c-4b0d-bd25-291e63104c09-logs" (OuterVolumeSpecName: "logs") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.876326 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.876823 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" exitCode=0 Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.876893 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"} Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.879664 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-kube-api-access-7xw6r" (OuterVolumeSpecName: "kube-api-access-7xw6r") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "kube-api-access-7xw6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.900565 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-certs" (OuterVolumeSpecName: "certs") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.906499 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-scripts" (OuterVolumeSpecName: "scripts") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.921041 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.930761 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.931950 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-scripts\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.931967 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.931975 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.931985 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xw6r\" (UniqueName: \"kubernetes.io/projected/ffd01d7d-385c-4b0d-bd25-291e63104c09-kube-api-access-7xw6r\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.931994 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ffd01d7d-385c-4b0d-bd25-291e63104c09-logs\") on node \"crc\" DevicePath \"\"" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.954364 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:53:59 crc kubenswrapper[4721]: E0130 21:53:59.954851 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.954864 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api" Jan 30 21:53:59 crc kubenswrapper[4721]: E0130 21:53:59.954902 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api-log" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.954908 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api-log" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.955095 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.955120 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerName="cloudkitty-api-log" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.955824 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c3d1003c-e848-4ff5-a27e-f1cff2e8162c","Type":"ContainerStarted","Data":"fb0d4d21277973d48988bc88228397ba2a67efa8b383ca93e27aa1155862ab3c"} Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.955855 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.955934 4721 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.960718 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.972366 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:53:59 crc kubenswrapper[4721]: I0130 21:53:59.996614 4721 generic.go:334] "Generic (PLEG): container finished" podID="ffd01d7d-385c-4b0d-bd25-291e63104c09" containerID="e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36" exitCode=0 Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:53:59.999012 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.000384 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"ffd01d7d-385c-4b0d-bd25-291e63104c09","Type":"ContainerDied","Data":"e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36"} Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.000455 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"ffd01d7d-385c-4b0d-bd25-291e63104c09","Type":"ContainerDied","Data":"0575286f3f7e2019d65398c110ec8fd83f98bc88b45b186b11d33a2b8960b712"} Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.001198 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data" (OuterVolumeSpecName: "config-data") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.011907 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.816496923 podStartE2EDuration="17.011877506s" podCreationTimestamp="2026-01-30 21:53:43 +0000 UTC" firstStartedPulling="2026-01-30 21:53:44.631501945 +0000 UTC m=+2213.423403191" lastFinishedPulling="2026-01-30 21:53:58.826882528 +0000 UTC m=+2227.618783774" observedRunningTime="2026-01-30 21:54:00.000828973 +0000 UTC m=+2228.792730219" watchObservedRunningTime="2026-01-30 21:54:00.011877506 +0000 UTC m=+2228.803778752" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.036739 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.074711 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.114689 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.130441 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ffd01d7d-385c-4b0d-bd25-291e63104c09" (UID: "ffd01d7d-385c-4b0d-bd25-291e63104c09"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143235 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9l9w\" (UniqueName: \"kubernetes.io/projected/0a596943-21b2-4c3d-9687-150ce3bde8f7-kube-api-access-g9l9w\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143492 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143516 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-config-data\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143604 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-scripts\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143653 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/0a596943-21b2-4c3d-9687-150ce3bde8f7-certs\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143673 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.143957 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70ce3206-6193-4c77-93bf-9fdb366a9a7d" path="/var/lib/kubelet/pods/70ce3206-6193-4c77-93bf-9fdb366a9a7d/volumes" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.148540 
4721 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.151434 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-q92m8"] Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.153035 4721 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.153865 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffd01d7d-385c-4b0d-bd25-291e63104c09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.255799 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9l9w\" (UniqueName: \"kubernetes.io/projected/0a596943-21b2-4c3d-9687-150ce3bde8f7-kube-api-access-g9l9w\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.256020 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-config-data\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.256129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.256240 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-scripts\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.256338 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/0a596943-21b2-4c3d-9687-150ce3bde8f7-certs\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.256410 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.260950 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.261853 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.262374 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-scripts\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.266024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a596943-21b2-4c3d-9687-150ce3bde8f7-config-data\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.275697 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9l9w\" (UniqueName: \"kubernetes.io/projected/0a596943-21b2-4c3d-9687-150ce3bde8f7-kube-api-access-g9l9w\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.331554 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/0a596943-21b2-4c3d-9687-150ce3bde8f7-certs\") pod \"cloudkitty-proc-0\" (UID: \"0a596943-21b2-4c3d-9687-150ce3bde8f7\") " pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.336466 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.352632 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.354066 4721 scope.go:117] "RemoveContainer" containerID="6c7f0e72b263faf1f8c74cf000b0aea1b54cadc81c92f3cce81eb40376057c48" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.378495 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.380227 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.382247 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.382424 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.382691 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.400821 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.483496 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.561528 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-config-data\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.561599 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.561629 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-scripts\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.561808 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.561897 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41460120-522d-44cf-a772-29cb623f9c14-logs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.561954 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.562002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/41460120-522d-44cf-a772-29cb623f9c14-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.562144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjlps\" (UniqueName: \"kubernetes.io/projected/41460120-522d-44cf-a772-29cb623f9c14-kube-api-access-xjlps\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.562188 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: E0130 21:54:00.565462 4721 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664363 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-config-data\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664445 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664470 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-scripts\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664496 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664521 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41460120-522d-44cf-a772-29cb623f9c14-logs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664542 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/41460120-522d-44cf-a772-29cb623f9c14-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664605 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjlps\" (UniqueName: \"kubernetes.io/projected/41460120-522d-44cf-a772-29cb623f9c14-kube-api-access-xjlps\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.664628 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.670998 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.671269 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41460120-522d-44cf-a772-29cb623f9c14-logs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.674134 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.678532 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.680778 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/41460120-522d-44cf-a772-29cb623f9c14-certs\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.683449 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-scripts\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.686924 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.689339 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41460120-522d-44cf-a772-29cb623f9c14-config-data\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.691361 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjlps\" (UniqueName: \"kubernetes.io/projected/41460120-522d-44cf-a772-29cb623f9c14-kube-api-access-xjlps\") pod \"cloudkitty-api-0\" (UID: \"41460120-522d-44cf-a772-29cb623f9c14\") " pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.718414 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.871114 4721 scope.go:117] "RemoveContainer" containerID="e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.917827 4721 scope.go:117] "RemoveContainer" containerID="a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.955056 4721 scope.go:117] "RemoveContainer" containerID="e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36" Jan 30 21:54:00 crc kubenswrapper[4721]: E0130 21:54:00.956359 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36\": container with ID starting with e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36 not found: ID does not exist" containerID="e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.956465 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36"} err="failed to get container status \"e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36\": rpc error: code = NotFound desc = could not find container \"e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36\": container with ID starting with e38873b49b649bcd23a4e92880bbe5a2aa39e2fc457b5ccbbc8140aaf4e20b36 not found: ID does not exist" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.956563 4721 scope.go:117] "RemoveContainer" containerID="a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6" Jan 30 21:54:00 crc kubenswrapper[4721]: E0130 21:54:00.958514 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6\": container with ID starting with a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6 not found: ID does not exist" containerID="a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6" Jan 30 21:54:00 crc kubenswrapper[4721]: I0130 21:54:00.958548 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6"} err="failed to get container status \"a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6\": rpc error: code = NotFound desc = could not find container \"a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6\": container with ID starting with a200f5dc469154bea073ff8790b585ab135c404765e1cdcdd00acaecbb5452d6 not found: ID does not exist" Jan 30 21:54:01 crc kubenswrapper[4721]: I0130 21:54:01.013512 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:54:01 crc kubenswrapper[4721]: E0130 21:54:01.013930 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" 
podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:54:01 crc kubenswrapper[4721]: I0130 21:54:01.018516 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" event={"ID":"8ec61729-73e8-4bc5-bc88-db2944c40ff9","Type":"ContainerStarted","Data":"1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a"} Jan 30 21:54:01 crc kubenswrapper[4721]: I0130 21:54:01.018563 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" event={"ID":"8ec61729-73e8-4bc5-bc88-db2944c40ff9","Type":"ContainerStarted","Data":"9d298cd6fde701d7024e5ed52bc2b9e988c671cb12cec47dea2fc278ab6ea2d5"} Jan 30 21:54:01 crc kubenswrapper[4721]: W0130 21:54:01.038131 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a596943_21b2_4c3d_9687_150ce3bde8f7.slice/crio-5ba9eb6052b4515fce32226f498c7ae7f42fe0808f6057100dbd69606e8a5b97 WatchSource:0}: Error finding container 5ba9eb6052b4515fce32226f498c7ae7f42fe0808f6057100dbd69606e8a5b97: Status 404 returned error can't find the container with id 5ba9eb6052b4515fce32226f498c7ae7f42fe0808f6057100dbd69606e8a5b97 Jan 30 21:54:01 crc kubenswrapper[4721]: I0130 21:54:01.060864 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Jan 30 21:54:01 crc kubenswrapper[4721]: I0130 21:54:01.258682 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Jan 30 21:54:01 crc kubenswrapper[4721]: W0130 21:54:01.289432 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41460120_522d_44cf_a772_29cb623f9c14.slice/crio-f98014ea7e48ed60af41a8b6f30089b2d2add2ecc2d7f87093ded65aa79f5260 WatchSource:0}: Error finding container f98014ea7e48ed60af41a8b6f30089b2d2add2ecc2d7f87093ded65aa79f5260: Status 404 returned error can't find the container with id f98014ea7e48ed60af41a8b6f30089b2d2add2ecc2d7f87093ded65aa79f5260 Jan 30 21:54:02 crc kubenswrapper[4721]: I0130 21:54:02.034288 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"0a596943-21b2-4c3d-9687-150ce3bde8f7","Type":"ContainerStarted","Data":"5ba9eb6052b4515fce32226f498c7ae7f42fe0808f6057100dbd69606e8a5b97"} Jan 30 21:54:02 crc kubenswrapper[4721]: I0130 21:54:02.037865 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerID="1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a" exitCode=0 Jan 30 21:54:02 crc kubenswrapper[4721]: I0130 21:54:02.037932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" event={"ID":"8ec61729-73e8-4bc5-bc88-db2944c40ff9","Type":"ContainerDied","Data":"1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a"} Jan 30 21:54:02 crc kubenswrapper[4721]: I0130 21:54:02.039370 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"41460120-522d-44cf-a772-29cb623f9c14","Type":"ContainerStarted","Data":"f98014ea7e48ed60af41a8b6f30089b2d2add2ecc2d7f87093ded65aa79f5260"} Jan 30 21:54:02 crc kubenswrapper[4721]: I0130 21:54:02.111611 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffd01d7d-385c-4b0d-bd25-291e63104c09" path="/var/lib/kubelet/pods/ffd01d7d-385c-4b0d-bd25-291e63104c09/volumes" Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.050213 4721 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" event={"ID":"8ec61729-73e8-4bc5-bc88-db2944c40ff9","Type":"ContainerStarted","Data":"7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc"} Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.051029 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.052994 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"41460120-522d-44cf-a772-29cb623f9c14","Type":"ContainerStarted","Data":"0f0bc2ad44298e5ec77e1436782a4da0a99eb42ddca21362251db995ca22b1e4"} Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.053028 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"41460120-522d-44cf-a772-29cb623f9c14","Type":"ContainerStarted","Data":"918c720326f1e36750967eb2c596b205104baecda59cbe94af159505f6b6ce88"} Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.053611 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.075508 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" podStartSLOduration=4.075487848 podStartE2EDuration="4.075487848s" podCreationTimestamp="2026-01-30 21:53:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:54:03.068070717 +0000 UTC m=+2231.859971963" watchObservedRunningTime="2026-01-30 21:54:03.075487848 +0000 UTC m=+2231.867389094" Jan 30 21:54:03 crc kubenswrapper[4721]: I0130 21:54:03.095469 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.095449936 podStartE2EDuration="3.095449936s" podCreationTimestamp="2026-01-30 21:54:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:54:03.090784692 +0000 UTC m=+2231.882685948" watchObservedRunningTime="2026-01-30 21:54:03.095449936 +0000 UTC m=+2231.887351182" Jan 30 21:54:04 crc kubenswrapper[4721]: I0130 21:54:04.067735 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"0a596943-21b2-4c3d-9687-150ce3bde8f7","Type":"ContainerStarted","Data":"ed9e640b0ec98a04503dfd9464e54a471d448957db7fa3d02162d1e2d0c1f726"} Jan 30 21:54:04 crc kubenswrapper[4721]: I0130 21:54:04.105275 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.222236201 podStartE2EDuration="5.10524432s" podCreationTimestamp="2026-01-30 21:53:59 +0000 UTC" firstStartedPulling="2026-01-30 21:54:01.040242326 +0000 UTC m=+2229.832143572" lastFinishedPulling="2026-01-30 21:54:02.923250445 +0000 UTC m=+2231.715151691" observedRunningTime="2026-01-30 21:54:04.100244725 +0000 UTC m=+2232.892145981" watchObservedRunningTime="2026-01-30 21:54:04.10524432 +0000 UTC m=+2232.897145586" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.323771 4721 scope.go:117] "RemoveContainer" containerID="1953b870e28951079a1bf6f916dec1459d856d4cc75e7fc741b3a92f616b4d03" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.481709 4721 scope.go:117] "RemoveContainer" 
containerID="169c9790b7d1fed25268c931f06c0008853c008639d1085c09c26d8b2d33cd4c" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.518228 4721 scope.go:117] "RemoveContainer" containerID="e82a358abb5a18628caf732080732e283cf518e9c1fd528c33115cf4446a17bd" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.630620 4721 scope.go:117] "RemoveContainer" containerID="ffcb99b059110848fe470768812bd22656ed7d6b17de07a5439602803b18f445" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.846804 4721 scope.go:117] "RemoveContainer" containerID="afae43b438a631fdef96afcae7fe1c842599090c45579e30cd1330541c027b3b" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.871925 4721 scope.go:117] "RemoveContainer" containerID="978798a3bea8af3e753394bf260b4132a08cfaa2c100b30ebd37a1e5f6e1af3b" Jan 30 21:54:06 crc kubenswrapper[4721]: I0130 21:54:06.929040 4721 scope.go:117] "RemoveContainer" containerID="799357dc87eb5d73f30e3ed2c08f7d262f8aaee4eb1d2c0890f02c937bdf096b" Jan 30 21:54:08 crc kubenswrapper[4721]: I0130 21:54:08.687165 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb74w" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" probeResult="failure" output=< Jan 30 21:54:08 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:54:08 crc kubenswrapper[4721]: > Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.449494 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.557830 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-vclzn"] Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.558047 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" podUID="cad43101-ea31-4866-9692-3d0229454653" containerName="dnsmasq-dns" containerID="cri-o://ac6f85eb59f30f19803847a2b67db3f2961f88f4b0fe929d21ad455525873237" gracePeriod=10 Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.800784 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-4gj7m"] Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.805343 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.812850 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-4gj7m"] Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.860574 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.860690 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4xgg\" (UniqueName: \"kubernetes.io/projected/94aad825-d42e-410a-a415-ff4e1910ecdb-kube-api-access-r4xgg\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.860738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-dns-svc\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.860765 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.860867 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-config\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.860966 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.861006 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962491 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-dns-svc\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962546 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962593 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-config\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962649 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962674 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962701 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.962763 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4xgg\" (UniqueName: \"kubernetes.io/projected/94aad825-d42e-410a-a415-ff4e1910ecdb-kube-api-access-r4xgg\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.963567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-config\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.963602 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.963567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-dns-svc\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.963576 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.963692 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.964180 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94aad825-d42e-410a-a415-ff4e1910ecdb-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:09 crc kubenswrapper[4721]: I0130 21:54:09.992611 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4xgg\" (UniqueName: \"kubernetes.io/projected/94aad825-d42e-410a-a415-ff4e1910ecdb-kube-api-access-r4xgg\") pod \"dnsmasq-dns-85f64749dc-4gj7m\" (UID: \"94aad825-d42e-410a-a415-ff4e1910ecdb\") " pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.126993 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.142997 4721 generic.go:334] "Generic (PLEG): container finished" podID="cad43101-ea31-4866-9692-3d0229454653" containerID="ac6f85eb59f30f19803847a2b67db3f2961f88f4b0fe929d21ad455525873237" exitCode=0 Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.143046 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" event={"ID":"cad43101-ea31-4866-9692-3d0229454653","Type":"ContainerDied","Data":"ac6f85eb59f30f19803847a2b67db3f2961f88f4b0fe929d21ad455525873237"} Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.654470 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-4gj7m"] Jan 30 21:54:10 crc kubenswrapper[4721]: W0130 21:54:10.655727 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94aad825_d42e_410a_a415_ff4e1910ecdb.slice/crio-1a9cae246ac1d608cdf27b6e87aa03b8438ea284b8fa77ed875243660b2a8bfa WatchSource:0}: Error finding container 1a9cae246ac1d608cdf27b6e87aa03b8438ea284b8fa77ed875243660b2a8bfa: Status 404 returned error can't find the container with id 1a9cae246ac1d608cdf27b6e87aa03b8438ea284b8fa77ed875243660b2a8bfa Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.837503 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.881440 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-config\") pod \"cad43101-ea31-4866-9692-3d0229454653\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.881699 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-nb\") pod \"cad43101-ea31-4866-9692-3d0229454653\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.881756 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-svc\") pod \"cad43101-ea31-4866-9692-3d0229454653\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.881875 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-swift-storage-0\") pod \"cad43101-ea31-4866-9692-3d0229454653\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.881975 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8b2t\" (UniqueName: \"kubernetes.io/projected/cad43101-ea31-4866-9692-3d0229454653-kube-api-access-f8b2t\") pod \"cad43101-ea31-4866-9692-3d0229454653\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.882022 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-sb\") pod \"cad43101-ea31-4866-9692-3d0229454653\" (UID: \"cad43101-ea31-4866-9692-3d0229454653\") " Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.907205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cad43101-ea31-4866-9692-3d0229454653-kube-api-access-f8b2t" (OuterVolumeSpecName: "kube-api-access-f8b2t") pod "cad43101-ea31-4866-9692-3d0229454653" (UID: "cad43101-ea31-4866-9692-3d0229454653"). InnerVolumeSpecName "kube-api-access-f8b2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:54:10 crc kubenswrapper[4721]: I0130 21:54:10.984448 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8b2t\" (UniqueName: \"kubernetes.io/projected/cad43101-ea31-4866-9692-3d0229454653-kube-api-access-f8b2t\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.018605 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cad43101-ea31-4866-9692-3d0229454653" (UID: "cad43101-ea31-4866-9692-3d0229454653"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.019023 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-config" (OuterVolumeSpecName: "config") pod "cad43101-ea31-4866-9692-3d0229454653" (UID: "cad43101-ea31-4866-9692-3d0229454653"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.020843 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cad43101-ea31-4866-9692-3d0229454653" (UID: "cad43101-ea31-4866-9692-3d0229454653"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.032243 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cad43101-ea31-4866-9692-3d0229454653" (UID: "cad43101-ea31-4866-9692-3d0229454653"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.044777 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cad43101-ea31-4866-9692-3d0229454653" (UID: "cad43101-ea31-4866-9692-3d0229454653"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.087247 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.087286 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.087312 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.087322 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.087331 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad43101-ea31-4866-9692-3d0229454653-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.152979 4721 generic.go:334] "Generic (PLEG): container finished" podID="94aad825-d42e-410a-a415-ff4e1910ecdb" containerID="aca42e6af1d0b7c1f24e39177e1d638b296dad109a805d4518c2336aafeb495c" exitCode=0 Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.153061 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" 
event={"ID":"94aad825-d42e-410a-a415-ff4e1910ecdb","Type":"ContainerDied","Data":"aca42e6af1d0b7c1f24e39177e1d638b296dad109a805d4518c2336aafeb495c"} Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.153097 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" event={"ID":"94aad825-d42e-410a-a415-ff4e1910ecdb","Type":"ContainerStarted","Data":"1a9cae246ac1d608cdf27b6e87aa03b8438ea284b8fa77ed875243660b2a8bfa"} Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.155839 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" event={"ID":"cad43101-ea31-4866-9692-3d0229454653","Type":"ContainerDied","Data":"e3707ab9ad6086a7c317cb5648d26180d0c5b6f6b3583cce260163c376e8f6f3"} Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.155882 4721 scope.go:117] "RemoveContainer" containerID="ac6f85eb59f30f19803847a2b67db3f2961f88f4b0fe929d21ad455525873237" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.156076 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-vclzn" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.190812 4721 scope.go:117] "RemoveContainer" containerID="c16bf7c51d0d28a3a7616f7dab46219c3261d656031e046b094026becb2a9924" Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.212879 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-vclzn"] Jan 30 21:54:11 crc kubenswrapper[4721]: I0130 21:54:11.222000 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-vclzn"] Jan 30 21:54:12 crc kubenswrapper[4721]: I0130 21:54:12.106876 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cad43101-ea31-4866-9692-3d0229454653" path="/var/lib/kubelet/pods/cad43101-ea31-4866-9692-3d0229454653/volumes" Jan 30 21:54:12 crc kubenswrapper[4721]: I0130 21:54:12.170094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" event={"ID":"94aad825-d42e-410a-a415-ff4e1910ecdb","Type":"ContainerStarted","Data":"055c7332eaa3e405db6f9965e1f93c1fd699c092bdc8539854d323d7c02f9a90"} Jan 30 21:54:12 crc kubenswrapper[4721]: I0130 21:54:12.170339 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:12 crc kubenswrapper[4721]: I0130 21:54:12.197280 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" podStartSLOduration=3.197257949 podStartE2EDuration="3.197257949s" podCreationTimestamp="2026-01-30 21:54:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:54:12.192327106 +0000 UTC m=+2240.984228372" watchObservedRunningTime="2026-01-30 21:54:12.197257949 +0000 UTC m=+2240.989159195" Jan 30 21:54:14 crc kubenswrapper[4721]: I0130 21:54:14.022683 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 30 21:54:15 crc kubenswrapper[4721]: I0130 21:54:15.092776 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:54:15 crc kubenswrapper[4721]: E0130 21:54:15.093758 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:54:18 crc kubenswrapper[4721]: I0130 21:54:18.672983 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb74w" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" probeResult="failure" output=< Jan 30 21:54:18 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 21:54:18 crc kubenswrapper[4721]: > Jan 30 21:54:20 crc kubenswrapper[4721]: I0130 21:54:20.128667 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85f64749dc-4gj7m" Jan 30 21:54:20 crc kubenswrapper[4721]: I0130 21:54:20.255703 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-q92m8"] Jan 30 21:54:20 crc kubenswrapper[4721]: I0130 21:54:20.256062 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerName="dnsmasq-dns" containerID="cri-o://7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc" gracePeriod=10 Jan 30 21:54:20 crc kubenswrapper[4721]: I0130 21:54:20.922416 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.036331 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-config\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.036458 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml74l\" (UniqueName: \"kubernetes.io/projected/8ec61729-73e8-4bc5-bc88-db2944c40ff9-kube-api-access-ml74l\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.036606 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-sb\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.036741 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-swift-storage-0\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.036809 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-svc\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.037032 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-openstack-edpm-ipam\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.037086 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-nb\") pod \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\" (UID: \"8ec61729-73e8-4bc5-bc88-db2944c40ff9\") " Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.042845 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ec61729-73e8-4bc5-bc88-db2944c40ff9-kube-api-access-ml74l" (OuterVolumeSpecName: "kube-api-access-ml74l") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "kube-api-access-ml74l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.093370 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.103873 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-config" (OuterVolumeSpecName: "config") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.105373 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.126114 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.127606 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.140168 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.140202 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.140212 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-config\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.140222 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml74l\" (UniqueName: \"kubernetes.io/projected/8ec61729-73e8-4bc5-bc88-db2944c40ff9-kube-api-access-ml74l\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.140235 4721 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.140245 4721 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.154657 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8ec61729-73e8-4bc5-bc88-db2944c40ff9" (UID: "8ec61729-73e8-4bc5-bc88-db2944c40ff9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.243445 4721 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ec61729-73e8-4bc5-bc88-db2944c40ff9-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.294787 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerID="7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc" exitCode=0 Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.294864 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" event={"ID":"8ec61729-73e8-4bc5-bc88-db2944c40ff9","Type":"ContainerDied","Data":"7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc"} Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.294873 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.294989 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-q92m8" event={"ID":"8ec61729-73e8-4bc5-bc88-db2944c40ff9","Type":"ContainerDied","Data":"9d298cd6fde701d7024e5ed52bc2b9e988c671cb12cec47dea2fc278ab6ea2d5"} Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.295026 4721 scope.go:117] "RemoveContainer" containerID="7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.348154 4721 scope.go:117] "RemoveContainer" containerID="1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.352460 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-q92m8"] Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.364928 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-q92m8"] Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.374791 4721 scope.go:117] "RemoveContainer" containerID="7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc" Jan 30 21:54:21 crc kubenswrapper[4721]: E0130 21:54:21.375317 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc\": container with ID starting with 7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc not found: ID does not exist" containerID="7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.375362 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc"} err="failed to get container status \"7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc\": rpc error: code = NotFound desc = could not find container \"7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc\": container with ID starting with 7f8b275c5e4a7938b0b16eea0e197fd45da8e77f26d52b4423899cda60c075cc not found: ID does not exist" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.375395 4721 scope.go:117] "RemoveContainer" containerID="1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a" Jan 30 21:54:21 crc kubenswrapper[4721]: E0130 21:54:21.375664 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a\": container with ID starting with 1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a not found: ID does not exist" containerID="1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a" Jan 30 21:54:21 crc kubenswrapper[4721]: I0130 21:54:21.375689 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a"} err="failed to get container status \"1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a\": rpc error: code = NotFound desc = could not find container \"1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a\": container with ID starting with 1c55875496787c10532de0440a8b6bcfe81a19780c1d59ef108d3a14a762e06a not found: ID does not exist" Jan 30 21:54:22 
crc kubenswrapper[4721]: I0130 21:54:22.106660 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" path="/var/lib/kubelet/pods/8ec61729-73e8-4bc5-bc88-db2944c40ff9/volumes" Jan 30 21:54:27 crc kubenswrapper[4721]: I0130 21:54:27.677246 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:54:27 crc kubenswrapper[4721]: I0130 21:54:27.736698 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:54:28 crc kubenswrapper[4721]: I0130 21:54:28.513208 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cb74w"] Jan 30 21:54:29 crc kubenswrapper[4721]: I0130 21:54:29.092083 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:54:29 crc kubenswrapper[4721]: E0130 21:54:29.092442 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:54:29 crc kubenswrapper[4721]: I0130 21:54:29.380816 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cb74w" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" containerID="cri-o://de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a" gracePeriod=2 Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.008277 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.028786 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-utilities\") pod \"47c095fd-4cfd-4518-9b5d-e71838d800a7\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.028973 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-catalog-content\") pod \"47c095fd-4cfd-4518-9b5d-e71838d800a7\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.029141 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbm7z\" (UniqueName: \"kubernetes.io/projected/47c095fd-4cfd-4518-9b5d-e71838d800a7-kube-api-access-vbm7z\") pod \"47c095fd-4cfd-4518-9b5d-e71838d800a7\" (UID: \"47c095fd-4cfd-4518-9b5d-e71838d800a7\") " Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.030380 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-utilities" (OuterVolumeSpecName: "utilities") pod "47c095fd-4cfd-4518-9b5d-e71838d800a7" (UID: "47c095fd-4cfd-4518-9b5d-e71838d800a7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.050102 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c095fd-4cfd-4518-9b5d-e71838d800a7-kube-api-access-vbm7z" (OuterVolumeSpecName: "kube-api-access-vbm7z") pod "47c095fd-4cfd-4518-9b5d-e71838d800a7" (UID: "47c095fd-4cfd-4518-9b5d-e71838d800a7"). InnerVolumeSpecName "kube-api-access-vbm7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.134767 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbm7z\" (UniqueName: \"kubernetes.io/projected/47c095fd-4cfd-4518-9b5d-e71838d800a7-kube-api-access-vbm7z\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.134805 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.177131 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47c095fd-4cfd-4518-9b5d-e71838d800a7" (UID: "47c095fd-4cfd-4518-9b5d-e71838d800a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.236843 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c095fd-4cfd-4518-9b5d-e71838d800a7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.392043 4721 generic.go:334] "Generic (PLEG): container finished" podID="12f1cce2-7b07-4519-b1c3-15e57ed44cde" containerID="c6508fc71e147624bdf5581c6a81dfde9d0cd99d3db9cbf4899971034704eff9" exitCode=0 Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.392118 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"12f1cce2-7b07-4519-b1c3-15e57ed44cde","Type":"ContainerDied","Data":"c6508fc71e147624bdf5581c6a81dfde9d0cd99d3db9cbf4899971034704eff9"} Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.396043 4721 generic.go:334] "Generic (PLEG): container finished" podID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerID="de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a" exitCode=0 Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.396103 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cb74w" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.396151 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerDied","Data":"de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a"} Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.396212 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb74w" event={"ID":"47c095fd-4cfd-4518-9b5d-e71838d800a7","Type":"ContainerDied","Data":"f44c26bf727976f625f7608d24526418d394dded8dac463e6122c86d4d60feb4"} Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.396241 4721 scope.go:117] "RemoveContainer" containerID="de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.398455 4721 generic.go:334] "Generic (PLEG): container finished" podID="7ccec6ec-8034-4a0f-88a6-b86751e0f22b" containerID="27107000ff079ca66fd926f83803bee451e0b0ee0ce932d3388a1f3b577fe041" exitCode=0 Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.398499 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ccec6ec-8034-4a0f-88a6-b86751e0f22b","Type":"ContainerDied","Data":"27107000ff079ca66fd926f83803bee451e0b0ee0ce932d3388a1f3b577fe041"} Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.501524 4721 scope.go:117] "RemoveContainer" containerID="5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.505897 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cb74w"] Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.519117 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cb74w"] Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.605840 4721 scope.go:117] "RemoveContainer" containerID="8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.669602 4721 scope.go:117] "RemoveContainer" containerID="de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a" Jan 30 21:54:30 crc kubenswrapper[4721]: E0130 21:54:30.670168 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a\": container with ID starting with de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a not found: ID does not exist" containerID="de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.670204 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a"} err="failed to get container status \"de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a\": rpc error: code = NotFound desc = could not find container \"de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a\": container with ID starting with de214c9c5aa7ad0553fc707bdc97747213c351299785bf43074114c68b483d9a not found: ID does not exist" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.670224 4721 scope.go:117] "RemoveContainer" 
containerID="5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0" Jan 30 21:54:30 crc kubenswrapper[4721]: E0130 21:54:30.670574 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0\": container with ID starting with 5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0 not found: ID does not exist" containerID="5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.670624 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0"} err="failed to get container status \"5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0\": rpc error: code = NotFound desc = could not find container \"5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0\": container with ID starting with 5f525f675585ec733a2bc4e48642ac82d89ee197e91e4ad4ea990601deec49a0 not found: ID does not exist" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.670656 4721 scope.go:117] "RemoveContainer" containerID="8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7" Jan 30 21:54:30 crc kubenswrapper[4721]: E0130 21:54:30.670990 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7\": container with ID starting with 8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7 not found: ID does not exist" containerID="8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7" Jan 30 21:54:30 crc kubenswrapper[4721]: I0130 21:54:30.671013 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7"} err="failed to get container status \"8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7\": rpc error: code = NotFound desc = could not find container \"8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7\": container with ID starting with 8dbe20a4544ff09c046b2403e3287535d29d4b64aac8cd6a91dcc43da21600c7 not found: ID does not exist" Jan 30 21:54:31 crc kubenswrapper[4721]: I0130 21:54:31.420595 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"12f1cce2-7b07-4519-b1c3-15e57ed44cde","Type":"ContainerStarted","Data":"32a5c23f760b4cd5a93a320c3ba3e32f26362eb669fbd916fff33e0a8b551f0b"} Jan 30 21:54:31 crc kubenswrapper[4721]: I0130 21:54:31.420856 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:54:31 crc kubenswrapper[4721]: I0130 21:54:31.426036 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ccec6ec-8034-4a0f-88a6-b86751e0f22b","Type":"ContainerStarted","Data":"594d3b56101cdcb9ac01cb56d3fe9a2bde147e33b11924cc103a9c88214b27cc"} Jan 30 21:54:31 crc kubenswrapper[4721]: I0130 21:54:31.426688 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 30 21:54:31 crc kubenswrapper[4721]: I0130 21:54:31.455276 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.455254781 
podStartE2EDuration="37.455254781s" podCreationTimestamp="2026-01-30 21:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:54:31.44392076 +0000 UTC m=+2260.235822026" watchObservedRunningTime="2026-01-30 21:54:31.455254781 +0000 UTC m=+2260.247156027" Jan 30 21:54:31 crc kubenswrapper[4721]: I0130 21:54:31.492160 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.492133605 podStartE2EDuration="38.492133605s" podCreationTimestamp="2026-01-30 21:53:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 21:54:31.486521551 +0000 UTC m=+2260.278422797" watchObservedRunningTime="2026-01-30 21:54:31.492133605 +0000 UTC m=+2260.284034851" Jan 30 21:54:32 crc kubenswrapper[4721]: I0130 21:54:32.104867 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" path="/var/lib/kubelet/pods/47c095fd-4cfd-4518-9b5d-e71838d800a7/volumes" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.183461 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2"] Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184562 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerName="dnsmasq-dns" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.184579 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerName="dnsmasq-dns" Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184596 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="extract-content" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.184606 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="extract-content" Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184640 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="extract-utilities" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.184649 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="extract-utilities" Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184658 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad43101-ea31-4866-9692-3d0229454653" containerName="init" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.184666 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad43101-ea31-4866-9692-3d0229454653" containerName="init" Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184695 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad43101-ea31-4866-9692-3d0229454653" containerName="dnsmasq-dns" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.184703 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad43101-ea31-4866-9692-3d0229454653" containerName="dnsmasq-dns" Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184725 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerName="init" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 
21:54:37.184733 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerName="init" Jan 30 21:54:37 crc kubenswrapper[4721]: E0130 21:54:37.184743 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.184752 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.185021 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cad43101-ea31-4866-9692-3d0229454653" containerName="dnsmasq-dns" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.185037 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c095fd-4cfd-4518-9b5d-e71838d800a7" containerName="registry-server" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.185053 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ec61729-73e8-4bc5-bc88-db2944c40ff9" containerName="dnsmasq-dns" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.185963 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.189088 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.189705 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.191670 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.192628 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.210887 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2"] Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.319766 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.319858 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.319903 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: 
\"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.319973 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppn57\" (UniqueName: \"kubernetes.io/projected/afddf697-9175-4a72-8226-bcb7030604f9-kube-api-access-ppn57\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.422098 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppn57\" (UniqueName: \"kubernetes.io/projected/afddf697-9175-4a72-8226-bcb7030604f9-kube-api-access-ppn57\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.422762 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.423816 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.423986 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.429549 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.430151 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.432334 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-inventory\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.445997 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppn57\" (UniqueName: \"kubernetes.io/projected/afddf697-9175-4a72-8226-bcb7030604f9-kube-api-access-ppn57\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.533924 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:54:37 crc kubenswrapper[4721]: I0130 21:54:37.762407 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Jan 30 21:54:38 crc kubenswrapper[4721]: I0130 21:54:38.174461 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2"] Jan 30 21:54:38 crc kubenswrapper[4721]: I0130 21:54:38.489609 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" event={"ID":"afddf697-9175-4a72-8226-bcb7030604f9","Type":"ContainerStarted","Data":"7f92d602ca6286dddec9607446e596e515684d5e6587c198b2d09eebc27feb51"} Jan 30 21:54:41 crc kubenswrapper[4721]: I0130 21:54:41.092751 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:54:41 crc kubenswrapper[4721]: E0130 21:54:41.093225 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:54:44 crc kubenswrapper[4721]: I0130 21:54:44.851499 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 30 21:54:45 crc kubenswrapper[4721]: I0130 21:54:45.123482 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 30 21:54:49 crc kubenswrapper[4721]: I0130 21:54:49.636087 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" event={"ID":"afddf697-9175-4a72-8226-bcb7030604f9","Type":"ContainerStarted","Data":"2df6cb559ce20fddfa90a3ddfcc6be6a9ef060698d3d33dd44b2dd1d65766951"} Jan 30 21:54:49 crc kubenswrapper[4721]: I0130 21:54:49.662801 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" podStartSLOduration=2.014436682 podStartE2EDuration="12.662779528s" podCreationTimestamp="2026-01-30 21:54:37 +0000 UTC" firstStartedPulling="2026-01-30 21:54:38.180570566 +0000 UTC m=+2266.972471812" lastFinishedPulling="2026-01-30 21:54:48.828913412 +0000 UTC m=+2277.620814658" observedRunningTime="2026-01-30 21:54:49.650707463 +0000 UTC m=+2278.442608709" watchObservedRunningTime="2026-01-30 21:54:49.662779528 +0000 UTC m=+2278.454680774" Jan 30 21:54:54 crc 
kubenswrapper[4721]: I0130 21:54:54.093787 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:54:54 crc kubenswrapper[4721]: E0130 21:54:54.095086 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:55:00 crc kubenswrapper[4721]: I0130 21:55:00.754514 4721 generic.go:334] "Generic (PLEG): container finished" podID="afddf697-9175-4a72-8226-bcb7030604f9" containerID="2df6cb559ce20fddfa90a3ddfcc6be6a9ef060698d3d33dd44b2dd1d65766951" exitCode=0 Jan 30 21:55:00 crc kubenswrapper[4721]: I0130 21:55:00.754620 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" event={"ID":"afddf697-9175-4a72-8226-bcb7030604f9","Type":"ContainerDied","Data":"2df6cb559ce20fddfa90a3ddfcc6be6a9ef060698d3d33dd44b2dd1d65766951"} Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.208614 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.309790 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppn57\" (UniqueName: \"kubernetes.io/projected/afddf697-9175-4a72-8226-bcb7030604f9-kube-api-access-ppn57\") pod \"afddf697-9175-4a72-8226-bcb7030604f9\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.309969 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-repo-setup-combined-ca-bundle\") pod \"afddf697-9175-4a72-8226-bcb7030604f9\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.310230 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-inventory\") pod \"afddf697-9175-4a72-8226-bcb7030604f9\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.310519 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-ssh-key-openstack-edpm-ipam\") pod \"afddf697-9175-4a72-8226-bcb7030604f9\" (UID: \"afddf697-9175-4a72-8226-bcb7030604f9\") " Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.320501 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "afddf697-9175-4a72-8226-bcb7030604f9" (UID: "afddf697-9175-4a72-8226-bcb7030604f9"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.320528 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afddf697-9175-4a72-8226-bcb7030604f9-kube-api-access-ppn57" (OuterVolumeSpecName: "kube-api-access-ppn57") pod "afddf697-9175-4a72-8226-bcb7030604f9" (UID: "afddf697-9175-4a72-8226-bcb7030604f9"). InnerVolumeSpecName "kube-api-access-ppn57". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.346840 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "afddf697-9175-4a72-8226-bcb7030604f9" (UID: "afddf697-9175-4a72-8226-bcb7030604f9"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.351897 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-inventory" (OuterVolumeSpecName: "inventory") pod "afddf697-9175-4a72-8226-bcb7030604f9" (UID: "afddf697-9175-4a72-8226-bcb7030604f9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.413660 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.413923 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppn57\" (UniqueName: \"kubernetes.io/projected/afddf697-9175-4a72-8226-bcb7030604f9-kube-api-access-ppn57\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.413931 4721 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.413942 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afddf697-9175-4a72-8226-bcb7030604f9-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.781702 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" event={"ID":"afddf697-9175-4a72-8226-bcb7030604f9","Type":"ContainerDied","Data":"7f92d602ca6286dddec9607446e596e515684d5e6587c198b2d09eebc27feb51"} Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.781754 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f92d602ca6286dddec9607446e596e515684d5e6587c198b2d09eebc27feb51" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.781824 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.857292 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q"] Jan 30 21:55:02 crc kubenswrapper[4721]: E0130 21:55:02.857846 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afddf697-9175-4a72-8226-bcb7030604f9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.857871 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="afddf697-9175-4a72-8226-bcb7030604f9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.858125 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="afddf697-9175-4a72-8226-bcb7030604f9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.858922 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.861057 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.861374 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.864224 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.864463 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.883481 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q"] Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.923588 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.923792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tt9c\" (UniqueName: \"kubernetes.io/projected/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-kube-api-access-2tt9c\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:02 crc kubenswrapper[4721]: I0130 21:55:02.924015 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.025682 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2tt9c\" (UniqueName: \"kubernetes.io/projected/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-kube-api-access-2tt9c\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.025784 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.025838 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.030086 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.030148 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.043063 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tt9c\" (UniqueName: \"kubernetes.io/projected/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-kube-api-access-2tt9c\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lf29q\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.182928 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:03 crc kubenswrapper[4721]: W0130 21:55:03.820943 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97ad0a5f_02dd_48d9_93a4_b7c7b9462879.slice/crio-f789a4515c0929ae4e5da145c66a1c013f1128a0c1b1f193707a6036f7808d66 WatchSource:0}: Error finding container f789a4515c0929ae4e5da145c66a1c013f1128a0c1b1f193707a6036f7808d66: Status 404 returned error can't find the container with id f789a4515c0929ae4e5da145c66a1c013f1128a0c1b1f193707a6036f7808d66 Jan 30 21:55:03 crc kubenswrapper[4721]: I0130 21:55:03.825203 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q"] Jan 30 21:55:04 crc kubenswrapper[4721]: I0130 21:55:04.806050 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" event={"ID":"97ad0a5f-02dd-48d9-93a4-b7c7b9462879","Type":"ContainerStarted","Data":"32d5f463d8e8e334fbaae59ff1784cc6e2b70662daa20d89ba6a8e643f8b5ed1"} Jan 30 21:55:04 crc kubenswrapper[4721]: I0130 21:55:04.806403 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" event={"ID":"97ad0a5f-02dd-48d9-93a4-b7c7b9462879","Type":"ContainerStarted","Data":"f789a4515c0929ae4e5da145c66a1c013f1128a0c1b1f193707a6036f7808d66"} Jan 30 21:55:04 crc kubenswrapper[4721]: I0130 21:55:04.837115 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" podStartSLOduration=2.39847151 podStartE2EDuration="2.837098326s" podCreationTimestamp="2026-01-30 21:55:02 +0000 UTC" firstStartedPulling="2026-01-30 21:55:03.826191328 +0000 UTC m=+2292.618092574" lastFinishedPulling="2026-01-30 21:55:04.264818134 +0000 UTC m=+2293.056719390" observedRunningTime="2026-01-30 21:55:04.824895457 +0000 UTC m=+2293.616796723" watchObservedRunningTime="2026-01-30 21:55:04.837098326 +0000 UTC m=+2293.628999562" Jan 30 21:55:07 crc kubenswrapper[4721]: I0130 21:55:07.861483 4721 generic.go:334] "Generic (PLEG): container finished" podID="97ad0a5f-02dd-48d9-93a4-b7c7b9462879" containerID="32d5f463d8e8e334fbaae59ff1784cc6e2b70662daa20d89ba6a8e643f8b5ed1" exitCode=0 Jan 30 21:55:07 crc kubenswrapper[4721]: I0130 21:55:07.861567 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" event={"ID":"97ad0a5f-02dd-48d9-93a4-b7c7b9462879","Type":"ContainerDied","Data":"32d5f463d8e8e334fbaae59ff1784cc6e2b70662daa20d89ba6a8e643f8b5ed1"} Jan 30 21:55:08 crc kubenswrapper[4721]: I0130 21:55:08.092371 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:55:08 crc kubenswrapper[4721]: E0130 21:55:08.092745 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.405025 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.471328 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-inventory\") pod \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.471528 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tt9c\" (UniqueName: \"kubernetes.io/projected/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-kube-api-access-2tt9c\") pod \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.471618 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-ssh-key-openstack-edpm-ipam\") pod \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\" (UID: \"97ad0a5f-02dd-48d9-93a4-b7c7b9462879\") " Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.483623 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-kube-api-access-2tt9c" (OuterVolumeSpecName: "kube-api-access-2tt9c") pod "97ad0a5f-02dd-48d9-93a4-b7c7b9462879" (UID: "97ad0a5f-02dd-48d9-93a4-b7c7b9462879"). InnerVolumeSpecName "kube-api-access-2tt9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.501278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "97ad0a5f-02dd-48d9-93a4-b7c7b9462879" (UID: "97ad0a5f-02dd-48d9-93a4-b7c7b9462879"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.509905 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-inventory" (OuterVolumeSpecName: "inventory") pod "97ad0a5f-02dd-48d9-93a4-b7c7b9462879" (UID: "97ad0a5f-02dd-48d9-93a4-b7c7b9462879"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.574316 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.574353 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tt9c\" (UniqueName: \"kubernetes.io/projected/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-kube-api-access-2tt9c\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.574370 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/97ad0a5f-02dd-48d9-93a4-b7c7b9462879-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.888574 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" event={"ID":"97ad0a5f-02dd-48d9-93a4-b7c7b9462879","Type":"ContainerDied","Data":"f789a4515c0929ae4e5da145c66a1c013f1128a0c1b1f193707a6036f7808d66"} Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.888803 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f789a4515c0929ae4e5da145c66a1c013f1128a0c1b1f193707a6036f7808d66" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.888862 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lf29q" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.966193 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr"] Jan 30 21:55:09 crc kubenswrapper[4721]: E0130 21:55:09.966623 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97ad0a5f-02dd-48d9-93a4-b7c7b9462879" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.966640 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="97ad0a5f-02dd-48d9-93a4-b7c7b9462879" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.966820 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="97ad0a5f-02dd-48d9-93a4-b7c7b9462879" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.967537 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.969667 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.970171 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.970192 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.971457 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 21:55:09 crc kubenswrapper[4721]: I0130 21:55:09.980021 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr"] Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.084105 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.084176 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xnlq\" (UniqueName: \"kubernetes.io/projected/0093f639-dd37-4e8d-86da-c6149cb3a4c4-kube-api-access-9xnlq\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.084556 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.084708 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.187147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.187355 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.187387 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xnlq\" (UniqueName: \"kubernetes.io/projected/0093f639-dd37-4e8d-86da-c6149cb3a4c4-kube-api-access-9xnlq\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.187505 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.192752 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.193624 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.193976 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.204169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xnlq\" (UniqueName: \"kubernetes.io/projected/0093f639-dd37-4e8d-86da-c6149cb3a4c4-kube-api-access-9xnlq\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.281921 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.855246 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr"] Jan 30 21:55:10 crc kubenswrapper[4721]: W0130 21:55:10.856740 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0093f639_dd37_4e8d_86da_c6149cb3a4c4.slice/crio-17eb9e2ab525cff310eef7098d3c376b13eda86c7bd964fa78de6c2556c38bd5 WatchSource:0}: Error finding container 17eb9e2ab525cff310eef7098d3c376b13eda86c7bd964fa78de6c2556c38bd5: Status 404 returned error can't find the container with id 17eb9e2ab525cff310eef7098d3c376b13eda86c7bd964fa78de6c2556c38bd5 Jan 30 21:55:10 crc kubenswrapper[4721]: I0130 21:55:10.901813 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" event={"ID":"0093f639-dd37-4e8d-86da-c6149cb3a4c4","Type":"ContainerStarted","Data":"17eb9e2ab525cff310eef7098d3c376b13eda86c7bd964fa78de6c2556c38bd5"} Jan 30 21:55:11 crc kubenswrapper[4721]: I0130 21:55:11.913607 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" event={"ID":"0093f639-dd37-4e8d-86da-c6149cb3a4c4","Type":"ContainerStarted","Data":"95e7aebd7beb7b59a23955277a2beaa2b3e2f30b0683604a4a6a540d718762cb"} Jan 30 21:55:22 crc kubenswrapper[4721]: I0130 21:55:22.092525 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:55:22 crc kubenswrapper[4721]: E0130 21:55:22.093418 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:55:33 crc kubenswrapper[4721]: I0130 21:55:33.092483 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:55:33 crc kubenswrapper[4721]: E0130 21:55:33.093367 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:55:47 crc kubenswrapper[4721]: I0130 21:55:47.091916 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:55:47 crc kubenswrapper[4721]: E0130 21:55:47.093011 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:55:59 crc kubenswrapper[4721]: 
I0130 21:55:59.093884 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:55:59 crc kubenswrapper[4721]: E0130 21:55:59.094846 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:56:05 crc kubenswrapper[4721]: I0130 21:56:05.039710 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" podStartSLOduration=55.558091354 podStartE2EDuration="56.039659962s" podCreationTimestamp="2026-01-30 21:55:09 +0000 UTC" firstStartedPulling="2026-01-30 21:55:10.8595988 +0000 UTC m=+2299.651500046" lastFinishedPulling="2026-01-30 21:55:11.341167398 +0000 UTC m=+2300.133068654" observedRunningTime="2026-01-30 21:55:11.937575408 +0000 UTC m=+2300.729476694" watchObservedRunningTime="2026-01-30 21:56:05.039659962 +0000 UTC m=+2353.831561218" Jan 30 21:56:05 crc kubenswrapper[4721]: I0130 21:56:05.053515 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-t6hwz"] Jan 30 21:56:05 crc kubenswrapper[4721]: I0130 21:56:05.067289 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-t6hwz"] Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.034714 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bc0f-account-create-update-8z8k5"] Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.061553 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-19d6-account-create-update-crh98"] Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.072038 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-19d6-account-create-update-crh98"] Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.080920 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bc0f-account-create-update-8z8k5"] Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.090461 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-49vqb"] Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.106178 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5" path="/var/lib/kubelet/pods/7ec1d7ec-c666-4a81-bf3d-afc8c29fb0c5/volumes" Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.107817 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ce4272-16c3-4f60-bbde-2149a9ed8138" path="/var/lib/kubelet/pods/d8ce4272-16c3-4f60-bbde-2149a9ed8138/volumes" Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.108783 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1cf23c5-49c5-4eed-93ed-30d6c223c74e" path="/var/lib/kubelet/pods/e1cf23c5-49c5-4eed-93ed-30d6c223c74e/volumes" Jan 30 21:56:06 crc kubenswrapper[4721]: I0130 21:56:06.110344 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-49vqb"] Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.029506 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-qgj6c"] Jan 30 21:56:07 
crc kubenswrapper[4721]: I0130 21:56:07.047733 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-27c2-account-create-update-mhsz2"] Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.061489 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-qgj6c"] Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.073939 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-27c2-account-create-update-mhsz2"] Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.605897 4721 scope.go:117] "RemoveContainer" containerID="740f6cbd3cd49dbe7c039fc5d55dce6094eaaa2afce31cb3c0a644207fc21e7e" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.671329 4721 scope.go:117] "RemoveContainer" containerID="0743ad15c2a957b56e9a7cdef78066074f4cf54d60fc58b8a47459a84cb76faf" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.709245 4721 scope.go:117] "RemoveContainer" containerID="9fc1b258d9893762813bf34eff9e539ff558bc5c12e6d75eec578cd3036607c3" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.731913 4721 scope.go:117] "RemoveContainer" containerID="495884936e66f0212054f41ae9c29eec370bd2355362ed832a6beb09366045eb" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.760858 4721 scope.go:117] "RemoveContainer" containerID="2794980561bb3f8a50f27e22f17663fafa3ec6d2d78d4d797ccb92b725ff23b3" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.787186 4721 scope.go:117] "RemoveContainer" containerID="c7312924b9986aecfa66ec0edc754acd51c3621de0fbe1866add74c28617418d" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.814948 4721 scope.go:117] "RemoveContainer" containerID="333e38ede45fb6e36c3f7950a1d5ce2ea248cff09092e32045ff9bdbd34cb9e6" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.878559 4721 scope.go:117] "RemoveContainer" containerID="54c498dc7adc7dcf0952499beee857eb48d868a2f5536440590dc0c5ce441c9c" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.903246 4721 scope.go:117] "RemoveContainer" containerID="bd78361dbf0c20ca745e3caa7cfbfe205c0893edd4e259a868e5416dc94e1d55" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.927462 4721 scope.go:117] "RemoveContainer" containerID="569ac664f271273d28adff4b507b3b3f20c4031671b43bf4a29877af6b05c6b5" Jan 30 21:56:07 crc kubenswrapper[4721]: I0130 21:56:07.965931 4721 scope.go:117] "RemoveContainer" containerID="05846b20f1a784244e1256c20804886049c326803873082b572b948348aede3c" Jan 30 21:56:08 crc kubenswrapper[4721]: I0130 21:56:08.033488 4721 scope.go:117] "RemoveContainer" containerID="868132c70b29e954b284022bfba9ab97f4467f981546db42e82be0708ac2aa5e" Jan 30 21:56:08 crc kubenswrapper[4721]: I0130 21:56:08.106717 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c869ca8-e78b-46b1-8223-1cc8b4d50551" path="/var/lib/kubelet/pods/4c869ca8-e78b-46b1-8223-1cc8b4d50551/volumes" Jan 30 21:56:08 crc kubenswrapper[4721]: I0130 21:56:08.108170 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7015b0db-a3ba-49d3-a6c7-63e581ef7ac5" path="/var/lib/kubelet/pods/7015b0db-a3ba-49d3-a6c7-63e581ef7ac5/volumes" Jan 30 21:56:08 crc kubenswrapper[4721]: I0130 21:56:08.109502 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc85e9a0-f671-4a65-9989-81274e180dd9" path="/var/lib/kubelet/pods/fc85e9a0-f671-4a65-9989-81274e180dd9/volumes" Jan 30 21:56:11 crc kubenswrapper[4721]: I0130 21:56:11.092799 4721 scope.go:117] "RemoveContainer" 
containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:56:11 crc kubenswrapper[4721]: E0130 21:56:11.093338 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:56:23 crc kubenswrapper[4721]: I0130 21:56:23.093476 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:56:23 crc kubenswrapper[4721]: E0130 21:56:23.094805 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:56:35 crc kubenswrapper[4721]: I0130 21:56:35.092609 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:56:35 crc kubenswrapper[4721]: E0130 21:56:35.093441 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.058414 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-mxcq9"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.074208 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-778cb"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.086631 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-bxvhs"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.101294 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-bxvhs"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.113208 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-778cb"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.123771 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-mxcq9"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.132514 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-zw7sq"] Jan 30 21:56:43 crc kubenswrapper[4721]: I0130 21:56:43.140807 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-zw7sq"] Jan 30 21:56:44 crc kubenswrapper[4721]: I0130 21:56:44.153805 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4553135b-6050-4c65-8f3f-e20a998bb7b0" path="/var/lib/kubelet/pods/4553135b-6050-4c65-8f3f-e20a998bb7b0/volumes" Jan 30 21:56:44 crc kubenswrapper[4721]: I0130 21:56:44.156392 4721 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="69c66a91-2e9d-4de3-b97c-726ef7ff501d" path="/var/lib/kubelet/pods/69c66a91-2e9d-4de3-b97c-726ef7ff501d/volumes" Jan 30 21:56:44 crc kubenswrapper[4721]: I0130 21:56:44.157521 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf51132-1e9b-4b5e-bd24-c7290cebd23c" path="/var/lib/kubelet/pods/6bf51132-1e9b-4b5e-bd24-c7290cebd23c/volumes" Jan 30 21:56:44 crc kubenswrapper[4721]: I0130 21:56:44.161192 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8583562e-347f-4aed-9977-0b02f27f3e4f" path="/var/lib/kubelet/pods/8583562e-347f-4aed-9977-0b02f27f3e4f/volumes" Jan 30 21:56:46 crc kubenswrapper[4721]: I0130 21:56:46.092422 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:56:46 crc kubenswrapper[4721]: E0130 21:56:46.093079 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:56:58 crc kubenswrapper[4721]: I0130 21:56:58.100955 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:56:58 crc kubenswrapper[4721]: E0130 21:56:58.101823 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.212568 4721 scope.go:117] "RemoveContainer" containerID="767b137197c46c50a6c4af9e2c32e06aefbbaf90714022efb2f07324a3162df8" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.248556 4721 scope.go:117] "RemoveContainer" containerID="d84e6fe0dc81e7bba3f335390a28e571dd22f28c5e6a19cc0f54061b74af833c" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.307935 4721 scope.go:117] "RemoveContainer" containerID="09c00efc71846dcb98bc3a082d566774c0dd7da4febac4139d733c0872470795" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.372755 4721 scope.go:117] "RemoveContainer" containerID="ff989b28d4328c3a73759e5a9853666df8367ae66b5a8eb72c5700f4068a8fc7" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.417636 4721 scope.go:117] "RemoveContainer" containerID="7000253c33e1b67e7dcde6d92e872c7bd738a2a8fe2c2fd61ee7b0c8c41544a7" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.456569 4721 scope.go:117] "RemoveContainer" containerID="33dfbbfef66356b75dcf7be8b9e2fa768e3971a4138a930fbcb480a49549e106" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.514999 4721 scope.go:117] "RemoveContainer" containerID="d7307dcb02312290efaedd3ca8fe92c3e1a45e1d4b8c17f0ba4e2d114a3c5b66" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.559324 4721 scope.go:117] "RemoveContainer" containerID="a1e784bb9b7553afc8baa6063cf52587a1e8731ba6391c7ceb670564962bb283" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.592878 4721 scope.go:117] "RemoveContainer" 
containerID="9e80b5aed331fd827eda7708338e0930cd7645c42a0cb0a71bdaedbb7213011a" Jan 30 21:57:08 crc kubenswrapper[4721]: I0130 21:57:08.666868 4721 scope.go:117] "RemoveContainer" containerID="85ed3b69bef3fd4189566e449edcfd497bea6e3e4c800cd19def67c6421775e1" Jan 30 21:57:09 crc kubenswrapper[4721]: I0130 21:57:09.091867 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:57:09 crc kubenswrapper[4721]: E0130 21:57:09.092133 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:57:10 crc kubenswrapper[4721]: I0130 21:57:10.046973 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-2cvzt"] Jan 30 21:57:10 crc kubenswrapper[4721]: I0130 21:57:10.064324 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-2cvzt"] Jan 30 21:57:10 crc kubenswrapper[4721]: I0130 21:57:10.104805 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e592b8-6959-4d00-94fa-fcab154f8615" path="/var/lib/kubelet/pods/56e592b8-6959-4d00-94fa-fcab154f8615/volumes" Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.057388 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-4af6-account-create-update-tkzks"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.068248 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b8f4-account-create-update-gthdn"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.079502 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-f0bb-account-create-update-k7wz8"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.090503 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-2796-account-create-update-jbzxh"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.100832 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-4af6-account-create-update-tkzks"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.110115 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-2796-account-create-update-jbzxh"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.120450 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-f0bb-account-create-update-k7wz8"] Jan 30 21:57:13 crc kubenswrapper[4721]: I0130 21:57:13.131967 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b8f4-account-create-update-gthdn"] Jan 30 21:57:14 crc kubenswrapper[4721]: I0130 21:57:14.105798 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23a03053-c813-4fd0-b38b-f30f2e40a0cf" path="/var/lib/kubelet/pods/23a03053-c813-4fd0-b38b-f30f2e40a0cf/volumes" Jan 30 21:57:14 crc kubenswrapper[4721]: I0130 21:57:14.107080 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2badf082-9873-424a-976c-4b9fde4bf13a" path="/var/lib/kubelet/pods/2badf082-9873-424a-976c-4b9fde4bf13a/volumes" Jan 30 21:57:14 crc kubenswrapper[4721]: I0130 21:57:14.111901 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="442a6735-5080-4fb6-89c0-57bcd08015a6" path="/var/lib/kubelet/pods/442a6735-5080-4fb6-89c0-57bcd08015a6/volumes" Jan 30 21:57:14 crc kubenswrapper[4721]: I0130 21:57:14.114107 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3e608c6-6350-4402-a53b-e0e5c55ae5b8" path="/var/lib/kubelet/pods/b3e608c6-6350-4402-a53b-e0e5c55ae5b8/volumes" Jan 30 21:57:21 crc kubenswrapper[4721]: I0130 21:57:21.092200 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:57:21 crc kubenswrapper[4721]: E0130 21:57:21.092989 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:57:23 crc kubenswrapper[4721]: I0130 21:57:23.026505 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-l2tjn"] Jan 30 21:57:23 crc kubenswrapper[4721]: I0130 21:57:23.035966 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-l2tjn"] Jan 30 21:57:24 crc kubenswrapper[4721]: I0130 21:57:24.104928 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c432809-1bbc-46aa-b2bb-4cc7fd182b5c" path="/var/lib/kubelet/pods/7c432809-1bbc-46aa-b2bb-4cc7fd182b5c/volumes" Jan 30 21:57:33 crc kubenswrapper[4721]: I0130 21:57:33.092118 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:57:33 crc kubenswrapper[4721]: E0130 21:57:33.092993 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:57:38 crc kubenswrapper[4721]: I0130 21:57:38.052263 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-ffrrr"] Jan 30 21:57:38 crc kubenswrapper[4721]: I0130 21:57:38.069332 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-ffrrr"] Jan 30 21:57:38 crc kubenswrapper[4721]: I0130 21:57:38.124979 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="befb6e6d-91a5-46af-9c9d-59688cfbb6ec" path="/var/lib/kubelet/pods/befb6e6d-91a5-46af-9c9d-59688cfbb6ec/volumes" Jan 30 21:57:45 crc kubenswrapper[4721]: I0130 21:57:45.092223 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:57:45 crc kubenswrapper[4721]: E0130 21:57:45.093117 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:57:57 crc 
Jan 30 21:57:57 crc kubenswrapper[4721]: I0130 21:57:57.092018 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"
Jan 30 21:57:57 crc kubenswrapper[4721]: E0130 21:57:57.092934 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:58:03 crc kubenswrapper[4721]: I0130 21:58:03.770509 4721 generic.go:334] "Generic (PLEG): container finished" podID="0093f639-dd37-4e8d-86da-c6149cb3a4c4" containerID="95e7aebd7beb7b59a23955277a2beaa2b3e2f30b0683604a4a6a540d718762cb" exitCode=0
Jan 30 21:58:03 crc kubenswrapper[4721]: I0130 21:58:03.770575 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" event={"ID":"0093f639-dd37-4e8d-86da-c6149cb3a4c4","Type":"ContainerDied","Data":"95e7aebd7beb7b59a23955277a2beaa2b3e2f30b0683604a4a6a540d718762cb"}
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.262883 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr"
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.320208 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-bootstrap-combined-ca-bundle\") pod \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") "
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.320307 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xnlq\" (UniqueName: \"kubernetes.io/projected/0093f639-dd37-4e8d-86da-c6149cb3a4c4-kube-api-access-9xnlq\") pod \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") "
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.320351 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-inventory\") pod \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") "
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.320617 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-ssh-key-openstack-edpm-ipam\") pod \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\" (UID: \"0093f639-dd37-4e8d-86da-c6149cb3a4c4\") "
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.327465 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "0093f639-dd37-4e8d-86da-c6149cb3a4c4" (UID: "0093f639-dd37-4e8d-86da-c6149cb3a4c4"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.328626 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0093f639-dd37-4e8d-86da-c6149cb3a4c4-kube-api-access-9xnlq" (OuterVolumeSpecName: "kube-api-access-9xnlq") pod "0093f639-dd37-4e8d-86da-c6149cb3a4c4" (UID: "0093f639-dd37-4e8d-86da-c6149cb3a4c4"). InnerVolumeSpecName "kube-api-access-9xnlq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.358477 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-inventory" (OuterVolumeSpecName: "inventory") pod "0093f639-dd37-4e8d-86da-c6149cb3a4c4" (UID: "0093f639-dd37-4e8d-86da-c6149cb3a4c4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.362031 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0093f639-dd37-4e8d-86da-c6149cb3a4c4" (UID: "0093f639-dd37-4e8d-86da-c6149cb3a4c4"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.424178 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.424219 4721 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.424229 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xnlq\" (UniqueName: \"kubernetes.io/projected/0093f639-dd37-4e8d-86da-c6149cb3a4c4-kube-api-access-9xnlq\") on node \"crc\" DevicePath \"\""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.424238 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0093f639-dd37-4e8d-86da-c6149cb3a4c4-inventory\") on node \"crc\" DevicePath \"\""
Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.789546 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" event={"ID":"0093f639-dd37-4e8d-86da-c6149cb3a4c4","Type":"ContainerDied","Data":"17eb9e2ab525cff310eef7098d3c376b13eda86c7bd964fa78de6c2556c38bd5"}
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.789596 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17eb9e2ab525cff310eef7098d3c376b13eda86c7bd964fa78de6c2556c38bd5" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.885033 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6"] Jan 30 21:58:05 crc kubenswrapper[4721]: E0130 21:58:05.885636 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0093f639-dd37-4e8d-86da-c6149cb3a4c4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.885659 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0093f639-dd37-4e8d-86da-c6149cb3a4c4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.885854 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0093f639-dd37-4e8d-86da-c6149cb3a4c4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.886666 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.889442 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.889539 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.889565 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.889626 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.895400 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6"] Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.936559 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.936634 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:05 crc kubenswrapper[4721]: I0130 21:58:05.937018 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qmn4\" (UniqueName: \"kubernetes.io/projected/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-kube-api-access-4qmn4\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.038988 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qmn4\" (UniqueName: \"kubernetes.io/projected/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-kube-api-access-4qmn4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.039106 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.039166 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.048335 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.048421 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.060732 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qmn4\" (UniqueName: \"kubernetes.io/projected/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-kube-api-access-4qmn4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.208556 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.854763 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6"] Jan 30 21:58:06 crc kubenswrapper[4721]: I0130 21:58:06.862859 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 21:58:07 crc kubenswrapper[4721]: I0130 21:58:07.811352 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" event={"ID":"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665","Type":"ContainerStarted","Data":"717acacee0f78e76e6b6d42b71f5d768cee250f6530665f22e77895dcee8e65d"} Jan 30 21:58:08 crc kubenswrapper[4721]: I0130 21:58:08.825883 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" event={"ID":"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665","Type":"ContainerStarted","Data":"df265b7a768404e0d14d2764f376da9572e093bdabb8653561dc2286ec4808a1"} Jan 30 21:58:08 crc kubenswrapper[4721]: I0130 21:58:08.849286 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" podStartSLOduration=2.637942638 podStartE2EDuration="3.849244549s" podCreationTimestamp="2026-01-30 21:58:05 +0000 UTC" firstStartedPulling="2026-01-30 21:58:06.862622734 +0000 UTC m=+2475.654523980" lastFinishedPulling="2026-01-30 21:58:08.073924645 +0000 UTC m=+2476.865825891" observedRunningTime="2026-01-30 21:58:08.841209038 +0000 UTC m=+2477.633110294" watchObservedRunningTime="2026-01-30 21:58:08.849244549 +0000 UTC m=+2477.641145795" Jan 30 21:58:08 crc kubenswrapper[4721]: I0130 21:58:08.907825 4721 scope.go:117] "RemoveContainer" containerID="89dfe56baa8a440d7c8c9d95c2a90eb2a5e0d2e6bed0bdb948854f1d536b1997" Jan 30 21:58:08 crc kubenswrapper[4721]: I0130 21:58:08.980375 4721 scope.go:117] "RemoveContainer" containerID="7bbdc18fbee129f6a1f262ea43189deae8924c2717fd8a9e14cdcadf90854e4e" Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.027769 4721 scope.go:117] "RemoveContainer" containerID="d8bb6d6ee3784ff7ea78566cbd1e939b94cc17105ffd71b68bed607eb538aff1" Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.068219 4721 scope.go:117] "RemoveContainer" containerID="2d4f8defd27885510400e99948248ff9ce180dca6370a421238fd9f4460d783a" Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.092141 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 21:58:09 crc kubenswrapper[4721]: E0130 21:58:09.092877 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.126282 4721 scope.go:117] "RemoveContainer" containerID="5dd298f39c14e1ee346a82eb396a55b2437f9d2dd2835eba61d04d5c984168ab" Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.218852 4721 scope.go:117] "RemoveContainer" containerID="39d4fbba2d60a4a44120b64620a6fdfa146b901508140eefee5b57800d9b08a7" Jan 30 
Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.241869 4721 scope.go:117] "RemoveContainer" containerID="c3191038524903b8c891f7b94fd86beb1ae608fab0f65ad79bdaa9aa73456193"
Jan 30 21:58:09 crc kubenswrapper[4721]: I0130 21:58:09.305617 4721 scope.go:117] "RemoveContainer" containerID="232bda3c110a7fd950109400b3a3c951da38b7594eefff4ba1a383e7c3f2ec64"
Jan 30 21:58:22 crc kubenswrapper[4721]: I0130 21:58:22.100959 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"
Jan 30 21:58:22 crc kubenswrapper[4721]: E0130 21:58:22.101899 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:58:33 crc kubenswrapper[4721]: I0130 21:58:33.093117 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"
Jan 30 21:58:33 crc kubenswrapper[4721]: E0130 21:58:33.093998 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:58:46 crc kubenswrapper[4721]: I0130 21:58:46.093799 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"
Jan 30 21:58:46 crc kubenswrapper[4721]: E0130 21:58:46.094860 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:58:57 crc kubenswrapper[4721]: I0130 21:58:57.092664 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"
Jan 30 21:58:57 crc kubenswrapper[4721]: E0130 21:58:57.093591 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 21:59:12 crc kubenswrapper[4721]: I0130 21:59:12.099734 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b"
Jan 30 21:59:12 crc kubenswrapper[4721]: I0130 21:59:12.539747 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"bf92663f77af193c5a48bd2b97d08d0766d0048fc46d63b788adab121454a826"}
Jan 30 21:59:37 crc kubenswrapper[4721]: I0130 21:59:37.054712 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5kbhd"]
Jan 30 21:59:37 crc kubenswrapper[4721]: I0130 21:59:37.064542 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5kbhd"]
Jan 30 21:59:38 crc kubenswrapper[4721]: I0130 21:59:38.106010 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa298585-a353-4910-9cb2-6527745b5811" path="/var/lib/kubelet/pods/aa298585-a353-4910-9cb2-6527745b5811/volumes"
Jan 30 21:59:38 crc kubenswrapper[4721]: I0130 21:59:38.824232 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" event={"ID":"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665","Type":"ContainerDied","Data":"df265b7a768404e0d14d2764f376da9572e093bdabb8653561dc2286ec4808a1"}
Jan 30 21:59:38 crc kubenswrapper[4721]: I0130 21:59:38.824494 4721 generic.go:334] "Generic (PLEG): container finished" podID="ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665" containerID="df265b7a768404e0d14d2764f376da9572e093bdabb8653561dc2286ec4808a1" exitCode=0
Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.419992 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6"
Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.546195 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-ssh-key-openstack-edpm-ipam\") pod \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") "
Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.546316 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qmn4\" (UniqueName: \"kubernetes.io/projected/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-kube-api-access-4qmn4\") pod \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") "
Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.546434 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-inventory\") pod \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\" (UID: \"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665\") "
Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.554630 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-kube-api-access-4qmn4" (OuterVolumeSpecName: "kube-api-access-4qmn4") pod "ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665" (UID: "ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665"). InnerVolumeSpecName "kube-api-access-4qmn4". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.581428 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-inventory" (OuterVolumeSpecName: "inventory") pod "ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665" (UID: "ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.649087 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.649133 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qmn4\" (UniqueName: \"kubernetes.io/projected/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-kube-api-access-4qmn4\") on node \"crc\" DevicePath \"\"" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.649148 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.849232 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" event={"ID":"ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665","Type":"ContainerDied","Data":"717acacee0f78e76e6b6d42b71f5d768cee250f6530665f22e77895dcee8e65d"} Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.849290 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="717acacee0f78e76e6b6d42b71f5d768cee250f6530665f22e77895dcee8e65d" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.849375 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.933315 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj"] Jan 30 21:59:40 crc kubenswrapper[4721]: E0130 21:59:40.935350 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.935373 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.935635 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.936439 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.938645 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.939638 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.940264 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.940357 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 21:59:40 crc kubenswrapper[4721]: I0130 21:59:40.947215 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj"] Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.054545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jxwd\" (UniqueName: \"kubernetes.io/projected/48a8b210-aaab-46b3-8436-f4acab16a60a-kube-api-access-7jxwd\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.054739 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.054782 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.156749 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jxwd\" (UniqueName: \"kubernetes.io/projected/48a8b210-aaab-46b3-8436-f4acab16a60a-kube-api-access-7jxwd\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.156831 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.157053 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.162237 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.162473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.173873 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jxwd\" (UniqueName: \"kubernetes.io/projected/48a8b210-aaab-46b3-8436-f4acab16a60a-kube-api-access-7jxwd\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.256767 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 21:59:41 crc kubenswrapper[4721]: I0130 21:59:41.875834 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj"] Jan 30 21:59:42 crc kubenswrapper[4721]: I0130 21:59:42.866917 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" event={"ID":"48a8b210-aaab-46b3-8436-f4acab16a60a","Type":"ContainerStarted","Data":"ca4db86a14a8072b6d5587e569e677bd3dfd382a98c33ca539cc5a66f110c7e5"} Jan 30 21:59:42 crc kubenswrapper[4721]: I0130 21:59:42.867333 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" event={"ID":"48a8b210-aaab-46b3-8436-f4acab16a60a","Type":"ContainerStarted","Data":"5be95c4b6929df5a677fbe8f407c7c57e270aa618e5f1df8bba1e912b099f50f"} Jan 30 21:59:42 crc kubenswrapper[4721]: I0130 21:59:42.884612 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" podStartSLOduration=2.376072479 podStartE2EDuration="2.884593909s" podCreationTimestamp="2026-01-30 21:59:40 +0000 UTC" firstStartedPulling="2026-01-30 21:59:41.879452361 +0000 UTC m=+2570.671353607" lastFinishedPulling="2026-01-30 21:59:42.387973781 +0000 UTC m=+2571.179875037" observedRunningTime="2026-01-30 21:59:42.883822654 +0000 UTC m=+2571.675723910" watchObservedRunningTime="2026-01-30 21:59:42.884593909 +0000 UTC m=+2571.676495155" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.053967 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jjnhr"] Jan 
30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.065783 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jjnhr"] Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.103639 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41614e72-cf2f-43c9-a879-f4c76ff277d5" path="/var/lib/kubelet/pods/41614e72-cf2f-43c9-a879-f4c76ff277d5/volumes" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.135679 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn"] Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.137370 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.153364 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn"] Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.189142 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.189262 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.300798 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-config-volume\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.300945 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fh96\" (UniqueName: \"kubernetes.io/projected/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-kube-api-access-2fh96\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.301036 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-secret-volume\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.403201 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-config-volume\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.403349 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fh96\" (UniqueName: \"kubernetes.io/projected/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-kube-api-access-2fh96\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.403451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-secret-volume\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.405070 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-config-volume\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.410985 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-secret-volume\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.422041 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fh96\" (UniqueName: \"kubernetes.io/projected/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-kube-api-access-2fh96\") pod \"collect-profiles-29496840-2cgvn\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.511594 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:00 crc kubenswrapper[4721]: I0130 22:00:00.981534 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn"] Jan 30 22:00:01 crc kubenswrapper[4721]: I0130 22:00:01.208697 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" event={"ID":"3e1a491a-e01e-4b70-8987-ddc3a7816b3d","Type":"ContainerStarted","Data":"452a37546a1cf6f7d31dbcd8f73ac4672fece7920a1d274dfdd7f342e4e6187a"} Jan 30 22:00:01 crc kubenswrapper[4721]: I0130 22:00:01.208746 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" event={"ID":"3e1a491a-e01e-4b70-8987-ddc3a7816b3d","Type":"ContainerStarted","Data":"3bc374c4b01ced0d18d0408c41084bb6503d223167ad8d22190414f0b32609d6"} Jan 30 22:00:01 crc kubenswrapper[4721]: I0130 22:00:01.229162 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" podStartSLOduration=1.229139309 podStartE2EDuration="1.229139309s" podCreationTimestamp="2026-01-30 22:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 22:00:01.223032916 +0000 UTC m=+2590.014934172" watchObservedRunningTime="2026-01-30 22:00:01.229139309 +0000 UTC m=+2590.021040555" Jan 30 22:00:02 crc kubenswrapper[4721]: I0130 22:00:02.226363 4721 generic.go:334] "Generic (PLEG): container finished" podID="3e1a491a-e01e-4b70-8987-ddc3a7816b3d" containerID="452a37546a1cf6f7d31dbcd8f73ac4672fece7920a1d274dfdd7f342e4e6187a" exitCode=0 Jan 30 22:00:02 crc kubenswrapper[4721]: I0130 22:00:02.226524 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" event={"ID":"3e1a491a-e01e-4b70-8987-ddc3a7816b3d","Type":"ContainerDied","Data":"452a37546a1cf6f7d31dbcd8f73ac4672fece7920a1d274dfdd7f342e4e6187a"} Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.660974 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.780759 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fh96\" (UniqueName: \"kubernetes.io/projected/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-kube-api-access-2fh96\") pod \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.780951 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-config-volume\") pod \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.781608 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-config-volume" (OuterVolumeSpecName: "config-volume") pod "3e1a491a-e01e-4b70-8987-ddc3a7816b3d" (UID: "3e1a491a-e01e-4b70-8987-ddc3a7816b3d"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.781719 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-secret-volume\") pod \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\" (UID: \"3e1a491a-e01e-4b70-8987-ddc3a7816b3d\") " Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.782803 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.788481 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3e1a491a-e01e-4b70-8987-ddc3a7816b3d" (UID: "3e1a491a-e01e-4b70-8987-ddc3a7816b3d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.789407 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-kube-api-access-2fh96" (OuterVolumeSpecName: "kube-api-access-2fh96") pod "3e1a491a-e01e-4b70-8987-ddc3a7816b3d" (UID: "3e1a491a-e01e-4b70-8987-ddc3a7816b3d"). InnerVolumeSpecName "kube-api-access-2fh96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.885591 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:03 crc kubenswrapper[4721]: I0130 22:00:03.885962 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fh96\" (UniqueName: \"kubernetes.io/projected/3e1a491a-e01e-4b70-8987-ddc3a7816b3d-kube-api-access-2fh96\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:04 crc kubenswrapper[4721]: I0130 22:00:04.254228 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" event={"ID":"3e1a491a-e01e-4b70-8987-ddc3a7816b3d","Type":"ContainerDied","Data":"3bc374c4b01ced0d18d0408c41084bb6503d223167ad8d22190414f0b32609d6"} Jan 30 22:00:04 crc kubenswrapper[4721]: I0130 22:00:04.254529 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bc374c4b01ced0d18d0408c41084bb6503d223167ad8d22190414f0b32609d6" Jan 30 22:00:04 crc kubenswrapper[4721]: I0130 22:00:04.254314 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496840-2cgvn" Jan 30 22:00:04 crc kubenswrapper[4721]: I0130 22:00:04.290778 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk"] Jan 30 22:00:04 crc kubenswrapper[4721]: I0130 22:00:04.300647 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496795-h48jk"] Jan 30 22:00:06 crc kubenswrapper[4721]: I0130 22:00:06.107406 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57505fbf-a7f7-45ff-91bb-f3463567721e" path="/var/lib/kubelet/pods/57505fbf-a7f7-45ff-91bb-f3463567721e/volumes" Jan 30 22:00:08 crc kubenswrapper[4721]: I0130 22:00:08.032769 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-gr825"] Jan 30 22:00:08 crc kubenswrapper[4721]: I0130 22:00:08.043561 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-gr825"] Jan 30 22:00:08 crc kubenswrapper[4721]: I0130 22:00:08.105261 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76" path="/var/lib/kubelet/pods/bb9a9bbe-1ae1-4f8a-80a2-97e181d3fd76/volumes" Jan 30 22:00:09 crc kubenswrapper[4721]: I0130 22:00:09.533640 4721 scope.go:117] "RemoveContainer" containerID="87f6096ead028af7ae8bd76949059a133211f3079c85533a9ff2fc619b0437df" Jan 30 22:00:09 crc kubenswrapper[4721]: I0130 22:00:09.598367 4721 scope.go:117] "RemoveContainer" containerID="77b05f5f7abd17e94a5cbc6eb3c197cabe6aff5802e6809e1473bb0d66d5287e" Jan 30 22:00:09 crc kubenswrapper[4721]: I0130 22:00:09.675855 4721 scope.go:117] "RemoveContainer" containerID="b412d308c94e402ce85900f21e5966c4b248167bddfe3a2f59825b7513dd9036" Jan 30 22:00:09 crc kubenswrapper[4721]: I0130 22:00:09.733725 4721 scope.go:117] "RemoveContainer" containerID="e4ef169e954c73d5d9db72483bd974c94e99b9061ee448c7ede797d1a4c31731" Jan 30 22:00:13 crc kubenswrapper[4721]: I0130 22:00:13.052844 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-k4phl"] Jan 30 22:00:13 crc kubenswrapper[4721]: I0130 22:00:13.062090 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-k4phl"] Jan 30 22:00:14 crc kubenswrapper[4721]: I0130 22:00:14.107469 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="710ef32f-2c64-4aea-a0d4-ea18b41e4f10" path="/var/lib/kubelet/pods/710ef32f-2c64-4aea-a0d4-ea18b41e4f10/volumes" Jan 30 22:00:41 crc kubenswrapper[4721]: I0130 22:00:41.654805 4721 generic.go:334] "Generic (PLEG): container finished" podID="48a8b210-aaab-46b3-8436-f4acab16a60a" containerID="ca4db86a14a8072b6d5587e569e677bd3dfd382a98c33ca539cc5a66f110c7e5" exitCode=0 Jan 30 22:00:41 crc kubenswrapper[4721]: I0130 22:00:41.654994 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" event={"ID":"48a8b210-aaab-46b3-8436-f4acab16a60a","Type":"ContainerDied","Data":"ca4db86a14a8072b6d5587e569e677bd3dfd382a98c33ca539cc5a66f110c7e5"} Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.206595 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.288982 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-inventory\") pod \"48a8b210-aaab-46b3-8436-f4acab16a60a\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.289127 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-ssh-key-openstack-edpm-ipam\") pod \"48a8b210-aaab-46b3-8436-f4acab16a60a\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.289384 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jxwd\" (UniqueName: \"kubernetes.io/projected/48a8b210-aaab-46b3-8436-f4acab16a60a-kube-api-access-7jxwd\") pod \"48a8b210-aaab-46b3-8436-f4acab16a60a\" (UID: \"48a8b210-aaab-46b3-8436-f4acab16a60a\") " Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.295178 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a8b210-aaab-46b3-8436-f4acab16a60a-kube-api-access-7jxwd" (OuterVolumeSpecName: "kube-api-access-7jxwd") pod "48a8b210-aaab-46b3-8436-f4acab16a60a" (UID: "48a8b210-aaab-46b3-8436-f4acab16a60a"). InnerVolumeSpecName "kube-api-access-7jxwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.317791 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "48a8b210-aaab-46b3-8436-f4acab16a60a" (UID: "48a8b210-aaab-46b3-8436-f4acab16a60a"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.319355 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-inventory" (OuterVolumeSpecName: "inventory") pod "48a8b210-aaab-46b3-8436-f4acab16a60a" (UID: "48a8b210-aaab-46b3-8436-f4acab16a60a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.392424 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.392471 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/48a8b210-aaab-46b3-8436-f4acab16a60a-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.392486 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jxwd\" (UniqueName: \"kubernetes.io/projected/48a8b210-aaab-46b3-8436-f4acab16a60a-kube-api-access-7jxwd\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.689400 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" event={"ID":"48a8b210-aaab-46b3-8436-f4acab16a60a","Type":"ContainerDied","Data":"5be95c4b6929df5a677fbe8f407c7c57e270aa618e5f1df8bba1e912b099f50f"} Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.689440 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be95c4b6929df5a677fbe8f407c7c57e270aa618e5f1df8bba1e912b099f50f" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.689486 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.767551 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m"] Jan 30 22:00:43 crc kubenswrapper[4721]: E0130 22:00:43.767996 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e1a491a-e01e-4b70-8987-ddc3a7816b3d" containerName="collect-profiles" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.768016 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e1a491a-e01e-4b70-8987-ddc3a7816b3d" containerName="collect-profiles" Jan 30 22:00:43 crc kubenswrapper[4721]: E0130 22:00:43.768042 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a8b210-aaab-46b3-8436-f4acab16a60a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.768048 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a8b210-aaab-46b3-8436-f4acab16a60a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.768287 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e1a491a-e01e-4b70-8987-ddc3a7816b3d" containerName="collect-profiles" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.768342 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a8b210-aaab-46b3-8436-f4acab16a60a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.769282 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.771774 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.774702 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.774940 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.775526 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.782710 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m"] Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.903217 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4dm2\" (UniqueName: \"kubernetes.io/projected/8a1c37f6-c659-4344-ad91-49f56d8fd843-kube-api-access-g4dm2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.903308 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:43 crc kubenswrapper[4721]: I0130 22:00:43.903386 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.004812 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4dm2\" (UniqueName: \"kubernetes.io/projected/8a1c37f6-c659-4344-ad91-49f56d8fd843-kube-api-access-g4dm2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.005166 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.005235 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.010260 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.011135 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.024220 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4dm2\" (UniqueName: \"kubernetes.io/projected/8a1c37f6-c659-4344-ad91-49f56d8fd843-kube-api-access-g4dm2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5np9m\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.124506 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.652064 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m"] Jan 30 22:00:44 crc kubenswrapper[4721]: W0130 22:00:44.660751 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a1c37f6_c659_4344_ad91_49f56d8fd843.slice/crio-79ed818cd4ed8d51bf8c4b782179ce0c9d9471a0fc44d75ef918aa31cd9fccaa WatchSource:0}: Error finding container 79ed818cd4ed8d51bf8c4b782179ce0c9d9471a0fc44d75ef918aa31cd9fccaa: Status 404 returned error can't find the container with id 79ed818cd4ed8d51bf8c4b782179ce0c9d9471a0fc44d75ef918aa31cd9fccaa Jan 30 22:00:44 crc kubenswrapper[4721]: I0130 22:00:44.701329 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" event={"ID":"8a1c37f6-c659-4344-ad91-49f56d8fd843","Type":"ContainerStarted","Data":"79ed818cd4ed8d51bf8c4b782179ce0c9d9471a0fc44d75ef918aa31cd9fccaa"} Jan 30 22:00:45 crc kubenswrapper[4721]: I0130 22:00:45.710815 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" event={"ID":"8a1c37f6-c659-4344-ad91-49f56d8fd843","Type":"ContainerStarted","Data":"e8730536c5798385776b65c6b9cf53d03c7b7cd2c967694e43eae5b94c41f45c"} Jan 30 22:00:45 crc kubenswrapper[4721]: I0130 22:00:45.753257 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" podStartSLOduration=2.111593414 podStartE2EDuration="2.753234269s" 
podCreationTimestamp="2026-01-30 22:00:43 +0000 UTC" firstStartedPulling="2026-01-30 22:00:44.663746258 +0000 UTC m=+2633.455647524" lastFinishedPulling="2026-01-30 22:00:45.305387133 +0000 UTC m=+2634.097288379" observedRunningTime="2026-01-30 22:00:45.747962083 +0000 UTC m=+2634.539863319" watchObservedRunningTime="2026-01-30 22:00:45.753234269 +0000 UTC m=+2634.545135515" Jan 30 22:00:50 crc kubenswrapper[4721]: I0130 22:00:50.762899 4721 generic.go:334] "Generic (PLEG): container finished" podID="8a1c37f6-c659-4344-ad91-49f56d8fd843" containerID="e8730536c5798385776b65c6b9cf53d03c7b7cd2c967694e43eae5b94c41f45c" exitCode=0 Jan 30 22:00:50 crc kubenswrapper[4721]: I0130 22:00:50.763004 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" event={"ID":"8a1c37f6-c659-4344-ad91-49f56d8fd843","Type":"ContainerDied","Data":"e8730536c5798385776b65c6b9cf53d03c7b7cd2c967694e43eae5b94c41f45c"} Jan 30 22:00:51 crc kubenswrapper[4721]: I0130 22:00:51.046220 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-jbqqk"] Jan 30 22:00:51 crc kubenswrapper[4721]: I0130 22:00:51.055714 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-jbqqk"] Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.121716 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46c7155a-444a-42b9-9e5d-183998bc5d22" path="/var/lib/kubelet/pods/46c7155a-444a-42b9-9e5d-183998bc5d22/volumes" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.448472 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.492533 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-ssh-key-openstack-edpm-ipam\") pod \"8a1c37f6-c659-4344-ad91-49f56d8fd843\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.492806 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-inventory\") pod \"8a1c37f6-c659-4344-ad91-49f56d8fd843\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.492909 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4dm2\" (UniqueName: \"kubernetes.io/projected/8a1c37f6-c659-4344-ad91-49f56d8fd843-kube-api-access-g4dm2\") pod \"8a1c37f6-c659-4344-ad91-49f56d8fd843\" (UID: \"8a1c37f6-c659-4344-ad91-49f56d8fd843\") " Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.787994 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" event={"ID":"8a1c37f6-c659-4344-ad91-49f56d8fd843","Type":"ContainerDied","Data":"79ed818cd4ed8d51bf8c4b782179ce0c9d9471a0fc44d75ef918aa31cd9fccaa"} Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.788049 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79ed818cd4ed8d51bf8c4b782179ce0c9d9471a0fc44d75ef918aa31cd9fccaa" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.788055 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5np9m" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.901120 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm"] Jan 30 22:00:52 crc kubenswrapper[4721]: E0130 22:00:52.902110 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a1c37f6-c659-4344-ad91-49f56d8fd843" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.902135 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a1c37f6-c659-4344-ad91-49f56d8fd843" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.902473 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a1c37f6-c659-4344-ad91-49f56d8fd843" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.903560 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:52 crc kubenswrapper[4721]: I0130 22:00:52.911480 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm"] Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.004167 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.004223 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.004720 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nft9f\" (UniqueName: \"kubernetes.io/projected/d1bfbef9-b785-4687-a0fa-471a6b4b6957-kube-api-access-nft9f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.107431 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.107508 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: 
\"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.107629 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nft9f\" (UniqueName: \"kubernetes.io/projected/d1bfbef9-b785-4687-a0fa-471a6b4b6957-kube-api-access-nft9f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.112387 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.115375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.126002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nft9f\" (UniqueName: \"kubernetes.io/projected/d1bfbef9-b785-4687-a0fa-471a6b4b6957-kube-api-access-nft9f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-p4zrm\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.205231 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a1c37f6-c659-4344-ad91-49f56d8fd843-kube-api-access-g4dm2" (OuterVolumeSpecName: "kube-api-access-g4dm2") pod "8a1c37f6-c659-4344-ad91-49f56d8fd843" (UID: "8a1c37f6-c659-4344-ad91-49f56d8fd843"). InnerVolumeSpecName "kube-api-access-g4dm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.209128 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8a1c37f6-c659-4344-ad91-49f56d8fd843" (UID: "8a1c37f6-c659-4344-ad91-49f56d8fd843"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.209818 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4dm2\" (UniqueName: \"kubernetes.io/projected/8a1c37f6-c659-4344-ad91-49f56d8fd843-kube-api-access-g4dm2\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.209846 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.212577 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-inventory" (OuterVolumeSpecName: "inventory") pod "8a1c37f6-c659-4344-ad91-49f56d8fd843" (UID: "8a1c37f6-c659-4344-ad91-49f56d8fd843"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.235792 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.312210 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a1c37f6-c659-4344-ad91-49f56d8fd843-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:00:53 crc kubenswrapper[4721]: I0130 22:00:53.799235 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm"] Jan 30 22:00:54 crc kubenswrapper[4721]: I0130 22:00:54.806823 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" event={"ID":"d1bfbef9-b785-4687-a0fa-471a6b4b6957","Type":"ContainerStarted","Data":"c8ffd5c4c50255e9cc8465ad7ff7e26d93deb90b45d86296b5b89315ec6a6f86"} Jan 30 22:00:54 crc kubenswrapper[4721]: I0130 22:00:54.807414 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" event={"ID":"d1bfbef9-b785-4687-a0fa-471a6b4b6957","Type":"ContainerStarted","Data":"6aee77f79dcf0de163a8272a1d3b6023223068adadeff6d5bfc8d700b5d45974"} Jan 30 22:00:54 crc kubenswrapper[4721]: I0130 22:00:54.837244 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" podStartSLOduration=2.398795866 podStartE2EDuration="2.837224605s" podCreationTimestamp="2026-01-30 22:00:52 +0000 UTC" firstStartedPulling="2026-01-30 22:00:53.802420161 +0000 UTC m=+2642.594321407" lastFinishedPulling="2026-01-30 22:00:54.24084888 +0000 UTC m=+2643.032750146" observedRunningTime="2026-01-30 22:00:54.828015556 +0000 UTC m=+2643.619916812" watchObservedRunningTime="2026-01-30 22:00:54.837224605 +0000 UTC m=+2643.629125861" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.133623 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29496841-6wgsv"] Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.135522 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.156137 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29496841-6wgsv"] Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.175639 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-config-data\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.175705 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-fernet-keys\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.175761 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq5lb\" (UniqueName: \"kubernetes.io/projected/33f22249-bfaa-4818-a56c-2d0192a8bef6-kube-api-access-sq5lb\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.175842 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-combined-ca-bundle\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.278062 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-combined-ca-bundle\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.278455 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-config-data\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.278558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-fernet-keys\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.278673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq5lb\" (UniqueName: \"kubernetes.io/projected/33f22249-bfaa-4818-a56c-2d0192a8bef6-kube-api-access-sq5lb\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.284617 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-combined-ca-bundle\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.285050 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-fernet-keys\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.295688 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-config-data\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.301880 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq5lb\" (UniqueName: \"kubernetes.io/projected/33f22249-bfaa-4818-a56c-2d0192a8bef6-kube-api-access-sq5lb\") pod \"keystone-cron-29496841-6wgsv\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.500283 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:00 crc kubenswrapper[4721]: I0130 22:01:00.976126 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29496841-6wgsv"] Jan 30 22:01:01 crc kubenswrapper[4721]: I0130 22:01:01.889236 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29496841-6wgsv" event={"ID":"33f22249-bfaa-4818-a56c-2d0192a8bef6","Type":"ContainerStarted","Data":"5c98906e80838e54a6044b877faaa4877a38afd4126ac312ddeac827468ae965"} Jan 30 22:01:01 crc kubenswrapper[4721]: I0130 22:01:01.889543 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29496841-6wgsv" event={"ID":"33f22249-bfaa-4818-a56c-2d0192a8bef6","Type":"ContainerStarted","Data":"e68693a9e8ccccaf08cdd6df3b86c1c1b9e1c3552dea5e5adb9658d5f5c95047"} Jan 30 22:01:01 crc kubenswrapper[4721]: I0130 22:01:01.919929 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29496841-6wgsv" podStartSLOduration=1.919910598 podStartE2EDuration="1.919910598s" podCreationTimestamp="2026-01-30 22:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 22:01:01.91142353 +0000 UTC m=+2650.703324766" watchObservedRunningTime="2026-01-30 22:01:01.919910598 +0000 UTC m=+2650.711811864" Jan 30 22:01:04 crc kubenswrapper[4721]: I0130 22:01:04.920590 4721 generic.go:334] "Generic (PLEG): container finished" podID="33f22249-bfaa-4818-a56c-2d0192a8bef6" containerID="5c98906e80838e54a6044b877faaa4877a38afd4126ac312ddeac827468ae965" exitCode=0 Jan 30 22:01:04 crc kubenswrapper[4721]: I0130 22:01:04.920714 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29496841-6wgsv" event={"ID":"33f22249-bfaa-4818-a56c-2d0192a8bef6","Type":"ContainerDied","Data":"5c98906e80838e54a6044b877faaa4877a38afd4126ac312ddeac827468ae965"} Jan 30 22:01:05 crc kubenswrapper[4721]: 
I0130 22:01:05.038919 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-sdf5s"] Jan 30 22:01:05 crc kubenswrapper[4721]: I0130 22:01:05.049960 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-sdf5s"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.039915 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-7d3d-account-create-update-ks6zf"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.060740 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-tk9hs"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.073955 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-3caa-account-create-update-hwv8l"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.081481 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-7d3d-account-create-update-ks6zf"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.116877 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a477f1e-4acc-47ef-ad6a-4a385e57a383" path="/var/lib/kubelet/pods/4a477f1e-4acc-47ef-ad6a-4a385e57a383/volumes" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.117936 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4" path="/var/lib/kubelet/pods/ec6d68b0-6e7a-41e4-b50c-21b40f07d8c4/volumes" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.118875 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-3caa-account-create-update-hwv8l"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.118908 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-tk9hs"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.131960 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-7j762"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.162655 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0bc8-account-create-update-8pnn2"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.174345 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-7j762"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.185793 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0bc8-account-create-update-8pnn2"] Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.474695 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.526994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq5lb\" (UniqueName: \"kubernetes.io/projected/33f22249-bfaa-4818-a56c-2d0192a8bef6-kube-api-access-sq5lb\") pod \"33f22249-bfaa-4818-a56c-2d0192a8bef6\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.527411 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-combined-ca-bundle\") pod \"33f22249-bfaa-4818-a56c-2d0192a8bef6\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.527620 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-fernet-keys\") pod \"33f22249-bfaa-4818-a56c-2d0192a8bef6\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.527781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-config-data\") pod \"33f22249-bfaa-4818-a56c-2d0192a8bef6\" (UID: \"33f22249-bfaa-4818-a56c-2d0192a8bef6\") " Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.561382 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33f22249-bfaa-4818-a56c-2d0192a8bef6-kube-api-access-sq5lb" (OuterVolumeSpecName: "kube-api-access-sq5lb") pod "33f22249-bfaa-4818-a56c-2d0192a8bef6" (UID: "33f22249-bfaa-4818-a56c-2d0192a8bef6"). InnerVolumeSpecName "kube-api-access-sq5lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.562160 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "33f22249-bfaa-4818-a56c-2d0192a8bef6" (UID: "33f22249-bfaa-4818-a56c-2d0192a8bef6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.590728 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33f22249-bfaa-4818-a56c-2d0192a8bef6" (UID: "33f22249-bfaa-4818-a56c-2d0192a8bef6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.636792 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq5lb\" (UniqueName: \"kubernetes.io/projected/33f22249-bfaa-4818-a56c-2d0192a8bef6-kube-api-access-sq5lb\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.636855 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.636866 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.715866 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-config-data" (OuterVolumeSpecName: "config-data") pod "33f22249-bfaa-4818-a56c-2d0192a8bef6" (UID: "33f22249-bfaa-4818-a56c-2d0192a8bef6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.739087 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f22249-bfaa-4818-a56c-2d0192a8bef6-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.941446 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29496841-6wgsv" event={"ID":"33f22249-bfaa-4818-a56c-2d0192a8bef6","Type":"ContainerDied","Data":"e68693a9e8ccccaf08cdd6df3b86c1c1b9e1c3552dea5e5adb9658d5f5c95047"} Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.941761 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e68693a9e8ccccaf08cdd6df3b86c1c1b9e1c3552dea5e5adb9658d5f5c95047" Jan 30 22:01:06 crc kubenswrapper[4721]: I0130 22:01:06.941718 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29496841-6wgsv" Jan 30 22:01:08 crc kubenswrapper[4721]: I0130 22:01:08.113473 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ed707d1-61ef-47a3-b1ae-71e81502a76d" path="/var/lib/kubelet/pods/3ed707d1-61ef-47a3-b1ae-71e81502a76d/volumes" Jan 30 22:01:08 crc kubenswrapper[4721]: I0130 22:01:08.114065 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e94b83d-72f0-4926-ba44-97d328e9088e" path="/var/lib/kubelet/pods/5e94b83d-72f0-4926-ba44-97d328e9088e/volumes" Jan 30 22:01:08 crc kubenswrapper[4721]: I0130 22:01:08.119339 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e87c63f-6379-491e-9efe-7255f5ed3ed0" path="/var/lib/kubelet/pods/7e87c63f-6379-491e-9efe-7255f5ed3ed0/volumes" Jan 30 22:01:08 crc kubenswrapper[4721]: I0130 22:01:08.120197 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8" path="/var/lib/kubelet/pods/ac8e39a7-f76c-4c6e-b2bf-b526ce8fe0b8/volumes" Jan 30 22:01:09 crc kubenswrapper[4721]: I0130 22:01:09.893341 4721 scope.go:117] "RemoveContainer" containerID="3069cea93ec24c48af1f2dde19d05a825cae0a87d92690aba9bf74208081f5d7" Jan 30 22:01:09 crc kubenswrapper[4721]: I0130 22:01:09.923913 4721 scope.go:117] "RemoveContainer" containerID="f891d8e14c88d24347597dcb55ef6d58b0c4b69b8bb11109d04cb8f32a0f55c9" Jan 30 22:01:09 crc kubenswrapper[4721]: I0130 22:01:09.989286 4721 scope.go:117] "RemoveContainer" containerID="608d96ad0b85b91fb46b12fb868f1a60f508ed4a4382de60affb327f786c3aa7" Jan 30 22:01:10 crc kubenswrapper[4721]: I0130 22:01:10.058844 4721 scope.go:117] "RemoveContainer" containerID="e642bdec5fe63d6c708cb4bbee967ed89c4c41bde2426c1c9f21709b26519297" Jan 30 22:01:10 crc kubenswrapper[4721]: I0130 22:01:10.085590 4721 scope.go:117] "RemoveContainer" containerID="99d5382d8e9e84fb66f5e1b07e327cd096750ffc3c147ba0c8c8cdaea6b96a15" Jan 30 22:01:10 crc kubenswrapper[4721]: I0130 22:01:10.135240 4721 scope.go:117] "RemoveContainer" containerID="d05507e9c5a5f53858a3c3f1e65f4b32a69c714ffc8432542b7ea9721ce9a8c9" Jan 30 22:01:10 crc kubenswrapper[4721]: I0130 22:01:10.182340 4721 scope.go:117] "RemoveContainer" containerID="ca7aed5bf67815d1ae51471ebe7450ee976b5e2dfd0f0c7ef3e3b8ea9159b7e8" Jan 30 22:01:10 crc kubenswrapper[4721]: I0130 22:01:10.223013 4721 scope.go:117] "RemoveContainer" containerID="e7c8ddee0178ee93ee7168f5f7f172c1b917ef632e72ff56741a2da50499bef6" Jan 30 22:01:26 crc kubenswrapper[4721]: I0130 22:01:26.144145 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1bfbef9-b785-4687-a0fa-471a6b4b6957" containerID="c8ffd5c4c50255e9cc8465ad7ff7e26d93deb90b45d86296b5b89315ec6a6f86" exitCode=0 Jan 30 22:01:26 crc kubenswrapper[4721]: I0130 22:01:26.144241 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" event={"ID":"d1bfbef9-b785-4687-a0fa-471a6b4b6957","Type":"ContainerDied","Data":"c8ffd5c4c50255e9cc8465ad7ff7e26d93deb90b45d86296b5b89315ec6a6f86"} Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.655525 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.736224 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-inventory\") pod \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.736713 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nft9f\" (UniqueName: \"kubernetes.io/projected/d1bfbef9-b785-4687-a0fa-471a6b4b6957-kube-api-access-nft9f\") pod \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.736931 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-ssh-key-openstack-edpm-ipam\") pod \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\" (UID: \"d1bfbef9-b785-4687-a0fa-471a6b4b6957\") " Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.743188 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1bfbef9-b785-4687-a0fa-471a6b4b6957-kube-api-access-nft9f" (OuterVolumeSpecName: "kube-api-access-nft9f") pod "d1bfbef9-b785-4687-a0fa-471a6b4b6957" (UID: "d1bfbef9-b785-4687-a0fa-471a6b4b6957"). InnerVolumeSpecName "kube-api-access-nft9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.764961 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d1bfbef9-b785-4687-a0fa-471a6b4b6957" (UID: "d1bfbef9-b785-4687-a0fa-471a6b4b6957"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.772058 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-inventory" (OuterVolumeSpecName: "inventory") pod "d1bfbef9-b785-4687-a0fa-471a6b4b6957" (UID: "d1bfbef9-b785-4687-a0fa-471a6b4b6957"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.840863 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.840906 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nft9f\" (UniqueName: \"kubernetes.io/projected/d1bfbef9-b785-4687-a0fa-471a6b4b6957-kube-api-access-nft9f\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:27 crc kubenswrapper[4721]: I0130 22:01:27.840916 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d1bfbef9-b785-4687-a0fa-471a6b4b6957-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.166468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" event={"ID":"d1bfbef9-b785-4687-a0fa-471a6b4b6957","Type":"ContainerDied","Data":"6aee77f79dcf0de163a8272a1d3b6023223068adadeff6d5bfc8d700b5d45974"} Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.166517 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6aee77f79dcf0de163a8272a1d3b6023223068adadeff6d5bfc8d700b5d45974" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.166560 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-p4zrm" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.264899 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk"] Jan 30 22:01:28 crc kubenswrapper[4721]: E0130 22:01:28.265477 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f22249-bfaa-4818-a56c-2d0192a8bef6" containerName="keystone-cron" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.265497 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f22249-bfaa-4818-a56c-2d0192a8bef6" containerName="keystone-cron" Jan 30 22:01:28 crc kubenswrapper[4721]: E0130 22:01:28.265541 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1bfbef9-b785-4687-a0fa-471a6b4b6957" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.265549 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1bfbef9-b785-4687-a0fa-471a6b4b6957" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.265752 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1bfbef9-b785-4687-a0fa-471a6b4b6957" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.265781 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="33f22249-bfaa-4818-a56c-2d0192a8bef6" containerName="keystone-cron" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.266604 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.268494 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.269149 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.269367 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.269496 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.276010 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk"] Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.352784 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76kf7\" (UniqueName: \"kubernetes.io/projected/aeb46845-60c0-48ae-960e-4f138a1caf5e-kube-api-access-76kf7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.352844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.353044 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.455741 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76kf7\" (UniqueName: \"kubernetes.io/projected/aeb46845-60c0-48ae-960e-4f138a1caf5e-kube-api-access-76kf7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.455817 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.455886 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.459605 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.460212 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.474922 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76kf7\" (UniqueName: \"kubernetes.io/projected/aeb46845-60c0-48ae-960e-4f138a1caf5e-kube-api-access-76kf7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:28 crc kubenswrapper[4721]: I0130 22:01:28.592365 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:01:29 crc kubenswrapper[4721]: I0130 22:01:29.162121 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk"] Jan 30 22:01:29 crc kubenswrapper[4721]: I0130 22:01:29.177588 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" event={"ID":"aeb46845-60c0-48ae-960e-4f138a1caf5e","Type":"ContainerStarted","Data":"80e84d9d11129fb693698954c5479770a0d22cd67a4df54defbbbdad84a0fed6"} Jan 30 22:01:29 crc kubenswrapper[4721]: I0130 22:01:29.448388 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:01:29 crc kubenswrapper[4721]: I0130 22:01:29.448493 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:01:30 crc kubenswrapper[4721]: I0130 22:01:30.194500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" event={"ID":"aeb46845-60c0-48ae-960e-4f138a1caf5e","Type":"ContainerStarted","Data":"ffff92c56ec7c41ba9d297c30b4a3a3a397856e074fdb6c913b643d02ec2ad6c"} Jan 30 22:01:30 crc kubenswrapper[4721]: I0130 22:01:30.213984 4721 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" podStartSLOduration=1.659992605 podStartE2EDuration="2.213958902s" podCreationTimestamp="2026-01-30 22:01:28 +0000 UTC" firstStartedPulling="2026-01-30 22:01:29.158603908 +0000 UTC m=+2677.950505154" lastFinishedPulling="2026-01-30 22:01:29.712570185 +0000 UTC m=+2678.504471451" observedRunningTime="2026-01-30 22:01:30.213040202 +0000 UTC m=+2679.004941448" watchObservedRunningTime="2026-01-30 22:01:30.213958902 +0000 UTC m=+2679.005860148" Jan 30 22:01:41 crc kubenswrapper[4721]: I0130 22:01:41.044895 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f2sbl"] Jan 30 22:01:41 crc kubenswrapper[4721]: I0130 22:01:41.054382 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f2sbl"] Jan 30 22:01:42 crc kubenswrapper[4721]: I0130 22:01:42.104903 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33591053-d381-4cec-bd2c-a9ddc8c4778c" path="/var/lib/kubelet/pods/33591053-d381-4cec-bd2c-a9ddc8c4778c/volumes" Jan 30 22:01:59 crc kubenswrapper[4721]: I0130 22:01:59.449221 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:01:59 crc kubenswrapper[4721]: I0130 22:01:59.449822 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:02:10 crc kubenswrapper[4721]: I0130 22:02:10.423533 4721 scope.go:117] "RemoveContainer" containerID="b5dbbe3f5bdbfd64d50612e2745d3b9fc3dc1928748705cee17ba01198bc8c18" Jan 30 22:02:11 crc kubenswrapper[4721]: I0130 22:02:11.052721 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-hs42m"] Jan 30 22:02:11 crc kubenswrapper[4721]: I0130 22:02:11.066314 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-hs42m"] Jan 30 22:02:11 crc kubenswrapper[4721]: I0130 22:02:11.597058 4721 generic.go:334] "Generic (PLEG): container finished" podID="aeb46845-60c0-48ae-960e-4f138a1caf5e" containerID="ffff92c56ec7c41ba9d297c30b4a3a3a397856e074fdb6c913b643d02ec2ad6c" exitCode=0 Jan 30 22:02:11 crc kubenswrapper[4721]: I0130 22:02:11.597113 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" event={"ID":"aeb46845-60c0-48ae-960e-4f138a1caf5e","Type":"ContainerDied","Data":"ffff92c56ec7c41ba9d297c30b4a3a3a397856e074fdb6c913b643d02ec2ad6c"} Jan 30 22:02:12 crc kubenswrapper[4721]: I0130 22:02:12.113165 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c66f646-071a-42e8-b551-18c8fd4c6df4" path="/var/lib/kubelet/pods/8c66f646-071a-42e8-b551-18c8fd4c6df4/volumes" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.244999 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.331133 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-inventory\") pod \"aeb46845-60c0-48ae-960e-4f138a1caf5e\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.331201 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76kf7\" (UniqueName: \"kubernetes.io/projected/aeb46845-60c0-48ae-960e-4f138a1caf5e-kube-api-access-76kf7\") pod \"aeb46845-60c0-48ae-960e-4f138a1caf5e\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.331421 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-ssh-key-openstack-edpm-ipam\") pod \"aeb46845-60c0-48ae-960e-4f138a1caf5e\" (UID: \"aeb46845-60c0-48ae-960e-4f138a1caf5e\") " Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.336469 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb46845-60c0-48ae-960e-4f138a1caf5e-kube-api-access-76kf7" (OuterVolumeSpecName: "kube-api-access-76kf7") pod "aeb46845-60c0-48ae-960e-4f138a1caf5e" (UID: "aeb46845-60c0-48ae-960e-4f138a1caf5e"). InnerVolumeSpecName "kube-api-access-76kf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.359559 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "aeb46845-60c0-48ae-960e-4f138a1caf5e" (UID: "aeb46845-60c0-48ae-960e-4f138a1caf5e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.361548 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-inventory" (OuterVolumeSpecName: "inventory") pod "aeb46845-60c0-48ae-960e-4f138a1caf5e" (UID: "aeb46845-60c0-48ae-960e-4f138a1caf5e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.433637 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.433675 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeb46845-60c0-48ae-960e-4f138a1caf5e-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.433686 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76kf7\" (UniqueName: \"kubernetes.io/projected/aeb46845-60c0-48ae-960e-4f138a1caf5e-kube-api-access-76kf7\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.616193 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" event={"ID":"aeb46845-60c0-48ae-960e-4f138a1caf5e","Type":"ContainerDied","Data":"80e84d9d11129fb693698954c5479770a0d22cd67a4df54defbbbdad84a0fed6"} Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.616244 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80e84d9d11129fb693698954c5479770a0d22cd67a4df54defbbbdad84a0fed6" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.616329 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.705123 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4989x"] Jan 30 22:02:13 crc kubenswrapper[4721]: E0130 22:02:13.705638 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb46845-60c0-48ae-960e-4f138a1caf5e" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.705660 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb46845-60c0-48ae-960e-4f138a1caf5e" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.705939 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb46845-60c0-48ae-960e-4f138a1caf5e" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.706935 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.708770 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.708873 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.711671 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.711726 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.713640 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4989x"] Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.739106 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.739177 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd69s\" (UniqueName: \"kubernetes.io/projected/c878af65-d3fd-4eae-9818-a30e27c363ec-kube-api-access-zd69s\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.739224 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.841606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.842039 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd69s\" (UniqueName: \"kubernetes.io/projected/c878af65-d3fd-4eae-9818-a30e27c363ec-kube-api-access-zd69s\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.842117 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc 
kubenswrapper[4721]: I0130 22:02:13.847724 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.855943 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:13 crc kubenswrapper[4721]: I0130 22:02:13.867907 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd69s\" (UniqueName: \"kubernetes.io/projected/c878af65-d3fd-4eae-9818-a30e27c363ec-kube-api-access-zd69s\") pod \"ssh-known-hosts-edpm-deployment-4989x\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:14 crc kubenswrapper[4721]: I0130 22:02:14.035065 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:14 crc kubenswrapper[4721]: I0130 22:02:14.653013 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4989x"] Jan 30 22:02:15 crc kubenswrapper[4721]: I0130 22:02:15.644388 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" event={"ID":"c878af65-d3fd-4eae-9818-a30e27c363ec","Type":"ContainerStarted","Data":"21d5474379f4632ba37b4c9031aa003c74f9549610318062d3c3939a89392ed4"} Jan 30 22:02:16 crc kubenswrapper[4721]: I0130 22:02:16.076977 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-56v9p"] Jan 30 22:02:16 crc kubenswrapper[4721]: I0130 22:02:16.105228 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-56v9p"] Jan 30 22:02:18 crc kubenswrapper[4721]: I0130 22:02:18.106064 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5db7a32-67d1-48b4-802f-0711a9e32eb2" path="/var/lib/kubelet/pods/f5db7a32-67d1-48b4-802f-0711a9e32eb2/volumes" Jan 30 22:02:19 crc kubenswrapper[4721]: I0130 22:02:19.690161 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" event={"ID":"c878af65-d3fd-4eae-9818-a30e27c363ec","Type":"ContainerStarted","Data":"d2258c28a0a07b5b43fa253372e8757e7b617821e6042c24964d7d6cbda0e18a"} Jan 30 22:02:19 crc kubenswrapper[4721]: I0130 22:02:19.711597 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" podStartSLOduration=2.216650131 podStartE2EDuration="6.711545256s" podCreationTimestamp="2026-01-30 22:02:13 +0000 UTC" firstStartedPulling="2026-01-30 22:02:14.637983825 +0000 UTC m=+2723.429885071" lastFinishedPulling="2026-01-30 22:02:19.13287895 +0000 UTC m=+2727.924780196" observedRunningTime="2026-01-30 22:02:19.707647344 +0000 UTC m=+2728.499548620" watchObservedRunningTime="2026-01-30 22:02:19.711545256 +0000 UTC m=+2728.503446502" Jan 30 22:02:25 crc kubenswrapper[4721]: I0130 22:02:25.765330 4721 generic.go:334] "Generic (PLEG): container 
finished" podID="c878af65-d3fd-4eae-9818-a30e27c363ec" containerID="d2258c28a0a07b5b43fa253372e8757e7b617821e6042c24964d7d6cbda0e18a" exitCode=0 Jan 30 22:02:25 crc kubenswrapper[4721]: I0130 22:02:25.765549 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" event={"ID":"c878af65-d3fd-4eae-9818-a30e27c363ec","Type":"ContainerDied","Data":"d2258c28a0a07b5b43fa253372e8757e7b617821e6042c24964d7d6cbda0e18a"} Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.398438 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.520993 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd69s\" (UniqueName: \"kubernetes.io/projected/c878af65-d3fd-4eae-9818-a30e27c363ec-kube-api-access-zd69s\") pod \"c878af65-d3fd-4eae-9818-a30e27c363ec\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.521063 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-ssh-key-openstack-edpm-ipam\") pod \"c878af65-d3fd-4eae-9818-a30e27c363ec\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.521407 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-inventory-0\") pod \"c878af65-d3fd-4eae-9818-a30e27c363ec\" (UID: \"c878af65-d3fd-4eae-9818-a30e27c363ec\") " Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.553117 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c878af65-d3fd-4eae-9818-a30e27c363ec-kube-api-access-zd69s" (OuterVolumeSpecName: "kube-api-access-zd69s") pod "c878af65-d3fd-4eae-9818-a30e27c363ec" (UID: "c878af65-d3fd-4eae-9818-a30e27c363ec"). InnerVolumeSpecName "kube-api-access-zd69s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.562023 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "c878af65-d3fd-4eae-9818-a30e27c363ec" (UID: "c878af65-d3fd-4eae-9818-a30e27c363ec"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.565379 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "c878af65-d3fd-4eae-9818-a30e27c363ec" (UID: "c878af65-d3fd-4eae-9818-a30e27c363ec"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.624493 4721 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-inventory-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.624542 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd69s\" (UniqueName: \"kubernetes.io/projected/c878af65-d3fd-4eae-9818-a30e27c363ec-kube-api-access-zd69s\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.624558 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c878af65-d3fd-4eae-9818-a30e27c363ec-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.785864 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" event={"ID":"c878af65-d3fd-4eae-9818-a30e27c363ec","Type":"ContainerDied","Data":"21d5474379f4632ba37b4c9031aa003c74f9549610318062d3c3939a89392ed4"} Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.785907 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21d5474379f4632ba37b4c9031aa003c74f9549610318062d3c3939a89392ed4" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.785955 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4989x" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.868666 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq"] Jan 30 22:02:27 crc kubenswrapper[4721]: E0130 22:02:27.869177 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c878af65-d3fd-4eae-9818-a30e27c363ec" containerName="ssh-known-hosts-edpm-deployment" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.869196 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c878af65-d3fd-4eae-9818-a30e27c363ec" containerName="ssh-known-hosts-edpm-deployment" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.869459 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c878af65-d3fd-4eae-9818-a30e27c363ec" containerName="ssh-known-hosts-edpm-deployment" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.870438 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.872865 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.873099 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.873451 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.873504 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:02:27 crc kubenswrapper[4721]: I0130 22:02:27.901574 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq"] Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.032796 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.032970 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxvp4\" (UniqueName: \"kubernetes.io/projected/982c1f39-3c88-4f1b-a5ea-4db039e1201e-kube-api-access-rxvp4\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.033215 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.135069 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.135199 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.135282 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxvp4\" (UniqueName: \"kubernetes.io/projected/982c1f39-3c88-4f1b-a5ea-4db039e1201e-kube-api-access-rxvp4\") pod 
\"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.141804 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.143268 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.158325 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxvp4\" (UniqueName: \"kubernetes.io/projected/982c1f39-3c88-4f1b-a5ea-4db039e1201e-kube-api-access-rxvp4\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jt6rq\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.199911 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:28 crc kubenswrapper[4721]: W0130 22:02:28.768540 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod982c1f39_3c88_4f1b_a5ea_4db039e1201e.slice/crio-92cfdac3e3621009fb7f56da8038d5f72dc48331532ad50ea93ec64c26cbffc7 WatchSource:0}: Error finding container 92cfdac3e3621009fb7f56da8038d5f72dc48331532ad50ea93ec64c26cbffc7: Status 404 returned error can't find the container with id 92cfdac3e3621009fb7f56da8038d5f72dc48331532ad50ea93ec64c26cbffc7 Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.780018 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq"] Jan 30 22:02:28 crc kubenswrapper[4721]: I0130 22:02:28.795021 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" event={"ID":"982c1f39-3c88-4f1b-a5ea-4db039e1201e","Type":"ContainerStarted","Data":"92cfdac3e3621009fb7f56da8038d5f72dc48331532ad50ea93ec64c26cbffc7"} Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.452656 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.453024 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.453083 4721 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.453896 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bf92663f77af193c5a48bd2b97d08d0766d0048fc46d63b788adab121454a826"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.453954 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://bf92663f77af193c5a48bd2b97d08d0766d0048fc46d63b788adab121454a826" gracePeriod=600 Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.806565 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="bf92663f77af193c5a48bd2b97d08d0766d0048fc46d63b788adab121454a826" exitCode=0 Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.806626 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"bf92663f77af193c5a48bd2b97d08d0766d0048fc46d63b788adab121454a826"} Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.807561 4721 scope.go:117] "RemoveContainer" containerID="e772c1eed5d3928631fc4ceb9aa64e3150a4066cf9cc3f44da74f04d0adca13b" Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.810096 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" event={"ID":"982c1f39-3c88-4f1b-a5ea-4db039e1201e","Type":"ContainerStarted","Data":"42a772e182ca24dcd00701e6726779d89486585fa8940c2bfe72fdc1e5adfdc0"} Jan 30 22:02:29 crc kubenswrapper[4721]: I0130 22:02:29.833275 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" podStartSLOduration=2.3885443029999998 podStartE2EDuration="2.83325304s" podCreationTimestamp="2026-01-30 22:02:27 +0000 UTC" firstStartedPulling="2026-01-30 22:02:28.772082473 +0000 UTC m=+2737.563983729" lastFinishedPulling="2026-01-30 22:02:29.21679122 +0000 UTC m=+2738.008692466" observedRunningTime="2026-01-30 22:02:29.824679419 +0000 UTC m=+2738.616580685" watchObservedRunningTime="2026-01-30 22:02:29.83325304 +0000 UTC m=+2738.625154286" Jan 30 22:02:30 crc kubenswrapper[4721]: I0130 22:02:30.826640 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d"} Jan 30 22:02:36 crc kubenswrapper[4721]: I0130 22:02:36.881007 4721 generic.go:334] "Generic (PLEG): container finished" podID="982c1f39-3c88-4f1b-a5ea-4db039e1201e" containerID="42a772e182ca24dcd00701e6726779d89486585fa8940c2bfe72fdc1e5adfdc0" exitCode=0 Jan 30 22:02:36 crc kubenswrapper[4721]: I0130 22:02:36.881103 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" 
event={"ID":"982c1f39-3c88-4f1b-a5ea-4db039e1201e","Type":"ContainerDied","Data":"42a772e182ca24dcd00701e6726779d89486585fa8940c2bfe72fdc1e5adfdc0"} Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.413037 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.493746 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-inventory\") pod \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.494017 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-ssh-key-openstack-edpm-ipam\") pod \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.494165 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxvp4\" (UniqueName: \"kubernetes.io/projected/982c1f39-3c88-4f1b-a5ea-4db039e1201e-kube-api-access-rxvp4\") pod \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\" (UID: \"982c1f39-3c88-4f1b-a5ea-4db039e1201e\") " Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.500495 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/982c1f39-3c88-4f1b-a5ea-4db039e1201e-kube-api-access-rxvp4" (OuterVolumeSpecName: "kube-api-access-rxvp4") pod "982c1f39-3c88-4f1b-a5ea-4db039e1201e" (UID: "982c1f39-3c88-4f1b-a5ea-4db039e1201e"). InnerVolumeSpecName "kube-api-access-rxvp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.523692 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "982c1f39-3c88-4f1b-a5ea-4db039e1201e" (UID: "982c1f39-3c88-4f1b-a5ea-4db039e1201e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.525502 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-inventory" (OuterVolumeSpecName: "inventory") pod "982c1f39-3c88-4f1b-a5ea-4db039e1201e" (UID: "982c1f39-3c88-4f1b-a5ea-4db039e1201e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.596140 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.596183 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxvp4\" (UniqueName: \"kubernetes.io/projected/982c1f39-3c88-4f1b-a5ea-4db039e1201e-kube-api-access-rxvp4\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.596197 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/982c1f39-3c88-4f1b-a5ea-4db039e1201e-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.899726 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" event={"ID":"982c1f39-3c88-4f1b-a5ea-4db039e1201e","Type":"ContainerDied","Data":"92cfdac3e3621009fb7f56da8038d5f72dc48331532ad50ea93ec64c26cbffc7"} Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.899767 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92cfdac3e3621009fb7f56da8038d5f72dc48331532ad50ea93ec64c26cbffc7" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.899778 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jt6rq" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.992649 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk"] Jan 30 22:02:38 crc kubenswrapper[4721]: E0130 22:02:38.997687 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="982c1f39-3c88-4f1b-a5ea-4db039e1201e" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.997722 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="982c1f39-3c88-4f1b-a5ea-4db039e1201e" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.998068 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="982c1f39-3c88-4f1b-a5ea-4db039e1201e" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:38 crc kubenswrapper[4721]: I0130 22:02:38.998941 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.010370 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.010469 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.012184 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.012581 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.041143 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk"] Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.104657 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.104875 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.104923 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlx9p\" (UniqueName: \"kubernetes.io/projected/ffae7484-d197-4caa-8553-151666fded73-kube-api-access-qlx9p\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.206897 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.207045 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.207097 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlx9p\" (UniqueName: \"kubernetes.io/projected/ffae7484-d197-4caa-8553-151666fded73-kube-api-access-qlx9p\") pod 
\"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.216820 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: E0130 22:02:39.216884 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod982c1f39_3c88_4f1b_a5ea_4db039e1201e.slice\": RecentStats: unable to find data in memory cache]" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.217152 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.227633 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlx9p\" (UniqueName: \"kubernetes.io/projected/ffae7484-d197-4caa-8553-151666fded73-kube-api-access-qlx9p\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.347541 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:39 crc kubenswrapper[4721]: I0130 22:02:39.916378 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk"] Jan 30 22:02:39 crc kubenswrapper[4721]: W0130 22:02:39.919982 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffae7484_d197_4caa_8553_151666fded73.slice/crio-194527f0dc3e7ce262a4641dc12f5de808de2999368375d57d6c8472e64ea3f0 WatchSource:0}: Error finding container 194527f0dc3e7ce262a4641dc12f5de808de2999368375d57d6c8472e64ea3f0: Status 404 returned error can't find the container with id 194527f0dc3e7ce262a4641dc12f5de808de2999368375d57d6c8472e64ea3f0 Jan 30 22:02:40 crc kubenswrapper[4721]: I0130 22:02:40.917900 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" event={"ID":"ffae7484-d197-4caa-8553-151666fded73","Type":"ContainerStarted","Data":"737ab7d380181c006c7c4c63d64063f8a61a05121a68a33dd311b7fc7e783f13"} Jan 30 22:02:40 crc kubenswrapper[4721]: I0130 22:02:40.918229 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" event={"ID":"ffae7484-d197-4caa-8553-151666fded73","Type":"ContainerStarted","Data":"194527f0dc3e7ce262a4641dc12f5de808de2999368375d57d6c8472e64ea3f0"} Jan 30 22:02:49 crc kubenswrapper[4721]: E0130 22:02:49.481847 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffae7484_d197_4caa_8553_151666fded73.slice/crio-737ab7d380181c006c7c4c63d64063f8a61a05121a68a33dd311b7fc7e783f13.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffae7484_d197_4caa_8553_151666fded73.slice/crio-conmon-737ab7d380181c006c7c4c63d64063f8a61a05121a68a33dd311b7fc7e783f13.scope\": RecentStats: unable to find data in memory cache]" Jan 30 22:02:49 crc kubenswrapper[4721]: I0130 22:02:49.997870 4721 generic.go:334] "Generic (PLEG): container finished" podID="ffae7484-d197-4caa-8553-151666fded73" containerID="737ab7d380181c006c7c4c63d64063f8a61a05121a68a33dd311b7fc7e783f13" exitCode=0 Jan 30 22:02:49 crc kubenswrapper[4721]: I0130 22:02:49.997950 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" event={"ID":"ffae7484-d197-4caa-8553-151666fded73","Type":"ContainerDied","Data":"737ab7d380181c006c7c4c63d64063f8a61a05121a68a33dd311b7fc7e783f13"} Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.522823 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.693855 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-ssh-key-openstack-edpm-ipam\") pod \"ffae7484-d197-4caa-8553-151666fded73\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.693967 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlx9p\" (UniqueName: \"kubernetes.io/projected/ffae7484-d197-4caa-8553-151666fded73-kube-api-access-qlx9p\") pod \"ffae7484-d197-4caa-8553-151666fded73\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.694099 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-inventory\") pod \"ffae7484-d197-4caa-8553-151666fded73\" (UID: \"ffae7484-d197-4caa-8553-151666fded73\") " Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.699887 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffae7484-d197-4caa-8553-151666fded73-kube-api-access-qlx9p" (OuterVolumeSpecName: "kube-api-access-qlx9p") pod "ffae7484-d197-4caa-8553-151666fded73" (UID: "ffae7484-d197-4caa-8553-151666fded73"). InnerVolumeSpecName "kube-api-access-qlx9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.723611 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ffae7484-d197-4caa-8553-151666fded73" (UID: "ffae7484-d197-4caa-8553-151666fded73"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.730936 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-inventory" (OuterVolumeSpecName: "inventory") pod "ffae7484-d197-4caa-8553-151666fded73" (UID: "ffae7484-d197-4caa-8553-151666fded73"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.797123 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.797180 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ffae7484-d197-4caa-8553-151666fded73-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:51 crc kubenswrapper[4721]: I0130 22:02:51.797197 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlx9p\" (UniqueName: \"kubernetes.io/projected/ffae7484-d197-4caa-8553-151666fded73-kube-api-access-qlx9p\") on node \"crc\" DevicePath \"\"" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.037453 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" event={"ID":"ffae7484-d197-4caa-8553-151666fded73","Type":"ContainerDied","Data":"194527f0dc3e7ce262a4641dc12f5de808de2999368375d57d6c8472e64ea3f0"} Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.037506 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="194527f0dc3e7ce262a4641dc12f5de808de2999368375d57d6c8472e64ea3f0" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.045454 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.108803 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr"] Jan 30 22:02:52 crc kubenswrapper[4721]: E0130 22:02:52.109169 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffae7484-d197-4caa-8553-151666fded73" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.109185 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffae7484-d197-4caa-8553-151666fded73" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.122168 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffae7484-d197-4caa-8553-151666fded73" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.123661 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr"] Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.123781 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.128857 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129188 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129383 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129487 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129536 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129660 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129708 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.129816 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.306700 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.306773 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.306985 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307185 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 
22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307231 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307343 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307422 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307448 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f2vz\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-kube-api-access-9f2vz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307615 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307675 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307828 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307866 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307918 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.307977 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410669 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410711 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410848 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410892 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410954 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.410998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.411021 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.411056 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.411096 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.411118 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f2vz\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-kube-api-access-9f2vz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: 
I0130 22:02:52.411154 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.411178 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.416679 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.417079 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.417123 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.417567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.417708 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.418272 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ovn-combined-ca-bundle\") 
pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.418439 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.418495 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.418909 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.419724 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.420012 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.420127 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.428447 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.428862 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f2vz\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-kube-api-access-9f2vz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.454640 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:02:52 crc kubenswrapper[4721]: I0130 22:02:52.993989 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr"] Jan 30 22:02:52 crc kubenswrapper[4721]: W0130 22:02:52.997007 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2c051d1_6a5d_4950_953d_204cc6adfc6e.slice/crio-7d679a30f774f7b9f1bf082306435b8e22a4441de678268a59a021ed727ee377 WatchSource:0}: Error finding container 7d679a30f774f7b9f1bf082306435b8e22a4441de678268a59a021ed727ee377: Status 404 returned error can't find the container with id 7d679a30f774f7b9f1bf082306435b8e22a4441de678268a59a021ed727ee377 Jan 30 22:02:53 crc kubenswrapper[4721]: I0130 22:02:53.049099 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" event={"ID":"f2c051d1-6a5d-4950-953d-204cc6adfc6e","Type":"ContainerStarted","Data":"7d679a30f774f7b9f1bf082306435b8e22a4441de678268a59a021ed727ee377"} Jan 30 22:02:54 crc kubenswrapper[4721]: I0130 22:02:54.059718 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" event={"ID":"f2c051d1-6a5d-4950-953d-204cc6adfc6e","Type":"ContainerStarted","Data":"ae87a2dfa7a787662170a5224e20e6d0b714727a1180b96de620f2b878608591"} Jan 30 22:02:54 crc kubenswrapper[4721]: I0130 22:02:54.088976 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" podStartSLOduration=1.658701607 podStartE2EDuration="2.088952268s" podCreationTimestamp="2026-01-30 22:02:52 +0000 UTC" firstStartedPulling="2026-01-30 22:02:53.000561692 +0000 UTC m=+2761.792462958" lastFinishedPulling="2026-01-30 22:02:53.430812373 +0000 UTC m=+2762.222713619" observedRunningTime="2026-01-30 22:02:54.076723642 +0000 UTC m=+2762.868624888" watchObservedRunningTime="2026-01-30 22:02:54.088952268 +0000 UTC m=+2762.880853514" Jan 30 22:03:00 crc kubenswrapper[4721]: I0130 22:03:00.043517 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-46g5v"] Jan 30 22:03:00 crc kubenswrapper[4721]: I0130 22:03:00.054187 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-46g5v"] Jan 30 22:03:00 crc kubenswrapper[4721]: I0130 22:03:00.103668 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f511802d-cde5-4900-8a57-b06ebf1bab3b" path="/var/lib/kubelet/pods/f511802d-cde5-4900-8a57-b06ebf1bab3b/volumes" Jan 30 22:03:10 crc kubenswrapper[4721]: I0130 22:03:10.565280 4721 scope.go:117] "RemoveContainer" containerID="04beb6d06e4ab9cdeef1d0239db123ec2fffe6856309c35ed9c462e9c8b8d282" Jan 30 22:03:10 crc kubenswrapper[4721]: I0130 22:03:10.619332 4721 scope.go:117] "RemoveContainer" 
containerID="055cbf36022f062ac3abda5376468704eef07db15d7f11d71db669ef8299a6d3" Jan 30 22:03:10 crc kubenswrapper[4721]: I0130 22:03:10.675987 4721 scope.go:117] "RemoveContainer" containerID="e618f6d2c31f7c68ad4e3e1479d2259b6e7ed9c9883e137e3960be8c8e935b4d" Jan 30 22:03:27 crc kubenswrapper[4721]: I0130 22:03:27.414462 4721 generic.go:334] "Generic (PLEG): container finished" podID="f2c051d1-6a5d-4950-953d-204cc6adfc6e" containerID="ae87a2dfa7a787662170a5224e20e6d0b714727a1180b96de620f2b878608591" exitCode=0 Jan 30 22:03:27 crc kubenswrapper[4721]: I0130 22:03:27.414529 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" event={"ID":"f2c051d1-6a5d-4950-953d-204cc6adfc6e","Type":"ContainerDied","Data":"ae87a2dfa7a787662170a5224e20e6d0b714727a1180b96de620f2b878608591"} Jan 30 22:03:28 crc kubenswrapper[4721]: I0130 22:03:28.959499 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057362 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057427 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057464 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057495 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-libvirt-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057558 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-neutron-metadata-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057643 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ovn-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057668 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-inventory\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057717 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-repo-setup-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057740 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-nova-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057778 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-telemetry-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057874 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-bootstrap-combined-ca-bundle\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057891 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9f2vz\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-kube-api-access-9f2vz\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057946 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.057994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ssh-key-openstack-edpm-ipam\") pod \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\" (UID: \"f2c051d1-6a5d-4950-953d-204cc6adfc6e\") " Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.065001 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.065074 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.066794 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.066858 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.066934 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.067278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.067429 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.068589 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.069872 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-kube-api-access-9f2vz" (OuterVolumeSpecName: "kube-api-access-9f2vz") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "kube-api-access-9f2vz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.073626 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.074566 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.077666 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.094799 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-inventory" (OuterVolumeSpecName: "inventory") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.096565 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f2c051d1-6a5d-4950-953d-204cc6adfc6e" (UID: "f2c051d1-6a5d-4950-953d-204cc6adfc6e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.162983 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163021 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163036 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163066 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163078 4721 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163087 4721 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163096 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163107 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163116 4721 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163124 4721 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163132 4721 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163142 4721 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f2c051d1-6a5d-4950-953d-204cc6adfc6e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163150 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9f2vz\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-kube-api-access-9f2vz\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.163159 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f2c051d1-6a5d-4950-953d-204cc6adfc6e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.440384 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" event={"ID":"f2c051d1-6a5d-4950-953d-204cc6adfc6e","Type":"ContainerDied","Data":"7d679a30f774f7b9f1bf082306435b8e22a4441de678268a59a021ed727ee377"} Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.440424 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d679a30f774f7b9f1bf082306435b8e22a4441de678268a59a021ed727ee377" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.440470 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.552824 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd"] Jan 30 22:03:29 crc kubenswrapper[4721]: E0130 22:03:29.553888 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c051d1-6a5d-4950-953d-204cc6adfc6e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.553913 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c051d1-6a5d-4950-953d-204cc6adfc6e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.554413 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c051d1-6a5d-4950-953d-204cc6adfc6e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.556073 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.559417 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.560777 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.560849 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.560785 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.561976 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.571784 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.571865 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.571903 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.572283 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6wv7\" (UniqueName: \"kubernetes.io/projected/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-kube-api-access-g6wv7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.572568 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.573665 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd"] Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.674421 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-g6wv7\" (UniqueName: \"kubernetes.io/projected/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-kube-api-access-g6wv7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.674513 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.674562 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.674609 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.674639 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.675727 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.679104 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.679848 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.686750 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.696145 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6wv7\" (UniqueName: \"kubernetes.io/projected/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-kube-api-access-g6wv7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qbsgd\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:29 crc kubenswrapper[4721]: I0130 22:03:29.880943 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:03:30 crc kubenswrapper[4721]: I0130 22:03:30.434755 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd"] Jan 30 22:03:30 crc kubenswrapper[4721]: I0130 22:03:30.443540 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 22:03:30 crc kubenswrapper[4721]: I0130 22:03:30.455394 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" event={"ID":"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86","Type":"ContainerStarted","Data":"4f3a40e576543aaf4df935ddf9f2a5bfbbb7c7adf53a21bb8db38a44a5ccbd21"} Jan 30 22:03:31 crc kubenswrapper[4721]: I0130 22:03:31.467139 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" event={"ID":"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86","Type":"ContainerStarted","Data":"3f36f4eb712e9530806344ceaacf416e8eaae4258b1270c2edeff18a7d9c7b16"} Jan 30 22:03:31 crc kubenswrapper[4721]: I0130 22:03:31.505260 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" podStartSLOduration=1.919924028 podStartE2EDuration="2.505238954s" podCreationTimestamp="2026-01-30 22:03:29 +0000 UTC" firstStartedPulling="2026-01-30 22:03:30.443213559 +0000 UTC m=+2799.235114805" lastFinishedPulling="2026-01-30 22:03:31.028528485 +0000 UTC m=+2799.820429731" observedRunningTime="2026-01-30 22:03:31.492673796 +0000 UTC m=+2800.284575062" watchObservedRunningTime="2026-01-30 22:03:31.505238954 +0000 UTC m=+2800.297140210" Jan 30 22:03:50 crc kubenswrapper[4721]: I0130 22:03:50.042134 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-wsdsc"] Jan 30 22:03:50 crc kubenswrapper[4721]: I0130 22:03:50.052176 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-wsdsc"] Jan 30 22:03:50 crc kubenswrapper[4721]: I0130 22:03:50.129109 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="580cb859-85bd-451b-a61b-1c1dfde44b17" path="/var/lib/kubelet/pods/580cb859-85bd-451b-a61b-1c1dfde44b17/volumes" Jan 30 22:03:56 crc kubenswrapper[4721]: I0130 22:03:56.028727 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-ttrpx"] Jan 30 22:03:56 crc kubenswrapper[4721]: I0130 22:03:56.045896 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-ttrpx"] Jan 30 22:03:56 crc kubenswrapper[4721]: I0130 22:03:56.103005 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="387eb724-8919-4a46-919a-083c3b427d3e" path="/var/lib/kubelet/pods/387eb724-8919-4a46-919a-083c3b427d3e/volumes" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.717291 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hcjfs"] Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.720098 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.742584 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hcjfs"] Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.818929 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-catalog-content\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.818978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8sfr\" (UniqueName: \"kubernetes.io/projected/eb235842-6e9d-425f-a416-c6bf490cf8fe-kube-api-access-v8sfr\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.819368 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-utilities\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.921720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-utilities\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.921921 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-catalog-content\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.921947 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8sfr\" (UniqueName: \"kubernetes.io/projected/eb235842-6e9d-425f-a416-c6bf490cf8fe-kube-api-access-v8sfr\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.922567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-utilities\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.922680 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-catalog-content\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:05 crc kubenswrapper[4721]: I0130 22:04:05.947391 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8sfr\" (UniqueName: \"kubernetes.io/projected/eb235842-6e9d-425f-a416-c6bf490cf8fe-kube-api-access-v8sfr\") pod \"certified-operators-hcjfs\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:06 crc kubenswrapper[4721]: I0130 22:04:06.044828 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:06 crc kubenswrapper[4721]: I0130 22:04:06.569605 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hcjfs"] Jan 30 22:04:06 crc kubenswrapper[4721]: I0130 22:04:06.839791 4721 generic.go:334] "Generic (PLEG): container finished" podID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerID="85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf" exitCode=0 Jan 30 22:04:06 crc kubenswrapper[4721]: I0130 22:04:06.839873 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerDied","Data":"85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf"} Jan 30 22:04:06 crc kubenswrapper[4721]: I0130 22:04:06.840136 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerStarted","Data":"495438ac91332a48f3bf3dddc7a533aa07ef761008462bd722014591e70f320a"} Jan 30 22:04:07 crc kubenswrapper[4721]: I0130 22:04:07.851312 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerStarted","Data":"7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83"} Jan 30 22:04:10 crc kubenswrapper[4721]: I0130 22:04:10.792594 4721 scope.go:117] "RemoveContainer" containerID="a79a988e0b9dfae21bafce3c65d80d06438549eb293da70d2694d7f6a95a29df" Jan 30 22:04:10 crc kubenswrapper[4721]: I0130 22:04:10.836112 4721 scope.go:117] "RemoveContainer" containerID="bab0201974485fa337d3a9a065d623c27f5cca7996174c0741468d30fbb998aa" Jan 30 22:04:11 crc kubenswrapper[4721]: I0130 22:04:11.902111 4721 generic.go:334] "Generic (PLEG): container finished" podID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerID="7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83" exitCode=0 Jan 30 22:04:11 crc kubenswrapper[4721]: I0130 22:04:11.902186 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerDied","Data":"7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83"} Jan 30 22:04:12 crc kubenswrapper[4721]: I0130 22:04:12.916336 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" 
event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerStarted","Data":"fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d"} Jan 30 22:04:12 crc kubenswrapper[4721]: I0130 22:04:12.948526 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hcjfs" podStartSLOduration=2.452866868 podStartE2EDuration="7.948497793s" podCreationTimestamp="2026-01-30 22:04:05 +0000 UTC" firstStartedPulling="2026-01-30 22:04:06.843705039 +0000 UTC m=+2835.635606285" lastFinishedPulling="2026-01-30 22:04:12.339335954 +0000 UTC m=+2841.131237210" observedRunningTime="2026-01-30 22:04:12.940149209 +0000 UTC m=+2841.732050465" watchObservedRunningTime="2026-01-30 22:04:12.948497793 +0000 UTC m=+2841.740399039" Jan 30 22:04:16 crc kubenswrapper[4721]: I0130 22:04:16.045167 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:16 crc kubenswrapper[4721]: I0130 22:04:16.045587 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:16 crc kubenswrapper[4721]: I0130 22:04:16.111131 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:26 crc kubenswrapper[4721]: I0130 22:04:26.106313 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:26 crc kubenswrapper[4721]: I0130 22:04:26.159023 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hcjfs"] Jan 30 22:04:27 crc kubenswrapper[4721]: I0130 22:04:27.100976 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hcjfs" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="registry-server" containerID="cri-o://fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d" gracePeriod=2 Jan 30 22:04:27 crc kubenswrapper[4721]: I0130 22:04:27.851865 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.015568 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-catalog-content\") pod \"eb235842-6e9d-425f-a416-c6bf490cf8fe\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.015646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8sfr\" (UniqueName: \"kubernetes.io/projected/eb235842-6e9d-425f-a416-c6bf490cf8fe-kube-api-access-v8sfr\") pod \"eb235842-6e9d-425f-a416-c6bf490cf8fe\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.015907 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-utilities\") pod \"eb235842-6e9d-425f-a416-c6bf490cf8fe\" (UID: \"eb235842-6e9d-425f-a416-c6bf490cf8fe\") " Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.017237 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-utilities" (OuterVolumeSpecName: "utilities") pod "eb235842-6e9d-425f-a416-c6bf490cf8fe" (UID: "eb235842-6e9d-425f-a416-c6bf490cf8fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.022659 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb235842-6e9d-425f-a416-c6bf490cf8fe-kube-api-access-v8sfr" (OuterVolumeSpecName: "kube-api-access-v8sfr") pod "eb235842-6e9d-425f-a416-c6bf490cf8fe" (UID: "eb235842-6e9d-425f-a416-c6bf490cf8fe"). InnerVolumeSpecName "kube-api-access-v8sfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.070899 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb235842-6e9d-425f-a416-c6bf490cf8fe" (UID: "eb235842-6e9d-425f-a416-c6bf490cf8fe"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.113473 4721 generic.go:334] "Generic (PLEG): container finished" podID="0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" containerID="3f36f4eb712e9530806344ceaacf416e8eaae4258b1270c2edeff18a7d9c7b16" exitCode=0 Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.113544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" event={"ID":"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86","Type":"ContainerDied","Data":"3f36f4eb712e9530806344ceaacf416e8eaae4258b1270c2edeff18a7d9c7b16"} Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.115993 4721 generic.go:334] "Generic (PLEG): container finished" podID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerID="fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d" exitCode=0 Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.116021 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerDied","Data":"fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d"} Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.116040 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcjfs" event={"ID":"eb235842-6e9d-425f-a416-c6bf490cf8fe","Type":"ContainerDied","Data":"495438ac91332a48f3bf3dddc7a533aa07ef761008462bd722014591e70f320a"} Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.116060 4721 scope.go:117] "RemoveContainer" containerID="fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.116190 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcjfs" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.118200 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.118229 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb235842-6e9d-425f-a416-c6bf490cf8fe-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.118244 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8sfr\" (UniqueName: \"kubernetes.io/projected/eb235842-6e9d-425f-a416-c6bf490cf8fe-kube-api-access-v8sfr\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.141482 4721 scope.go:117] "RemoveContainer" containerID="7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.162160 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hcjfs"] Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.168710 4721 scope.go:117] "RemoveContainer" containerID="85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.173552 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hcjfs"] Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.222230 4721 scope.go:117] "RemoveContainer" containerID="fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d" Jan 30 22:04:28 crc kubenswrapper[4721]: E0130 22:04:28.223483 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d\": container with ID starting with fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d not found: ID does not exist" containerID="fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.223536 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d"} err="failed to get container status \"fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d\": rpc error: code = NotFound desc = could not find container \"fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d\": container with ID starting with fd5edbaa938347589760e179428c0a401d52d0f303d871283ba51ebfe23bbf4d not found: ID does not exist" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.223570 4721 scope.go:117] "RemoveContainer" containerID="7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83" Jan 30 22:04:28 crc kubenswrapper[4721]: E0130 22:04:28.224186 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83\": container with ID starting with 7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83 not found: ID does not exist" containerID="7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.224230 4721 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83"} err="failed to get container status \"7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83\": rpc error: code = NotFound desc = could not find container \"7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83\": container with ID starting with 7253e1b50eb2ff98236d062e34761c88cb802e982cc23eecaa4ec185e0122c83 not found: ID does not exist" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.224251 4721 scope.go:117] "RemoveContainer" containerID="85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf" Jan 30 22:04:28 crc kubenswrapper[4721]: E0130 22:04:28.224607 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf\": container with ID starting with 85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf not found: ID does not exist" containerID="85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf" Jan 30 22:04:28 crc kubenswrapper[4721]: I0130 22:04:28.224637 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf"} err="failed to get container status \"85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf\": rpc error: code = NotFound desc = could not find container \"85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf\": container with ID starting with 85ce4c2d036c77f096a58bdde4004ae1e30da4415be576ce76545dfed1c706cf not found: ID does not exist" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.448610 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.448667 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.690796 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.880994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6wv7\" (UniqueName: \"kubernetes.io/projected/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-kube-api-access-g6wv7\") pod \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.881100 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovn-combined-ca-bundle\") pod \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.881122 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovncontroller-config-0\") pod \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.881177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ssh-key-openstack-edpm-ipam\") pod \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.881226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-inventory\") pod \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\" (UID: \"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86\") " Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.887740 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-kube-api-access-g6wv7" (OuterVolumeSpecName: "kube-api-access-g6wv7") pod "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" (UID: "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86"). InnerVolumeSpecName "kube-api-access-g6wv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.889525 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" (UID: "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.916558 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-inventory" (OuterVolumeSpecName: "inventory") pod "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" (UID: "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.916767 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" (UID: "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.925956 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" (UID: "0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.983813 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6wv7\" (UniqueName: \"kubernetes.io/projected/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-kube-api-access-g6wv7\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.983857 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.983866 4721 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.983875 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:29 crc kubenswrapper[4721]: I0130 22:04:29.983884 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.103394 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" path="/var/lib/kubelet/pods/eb235842-6e9d-425f-a416-c6bf490cf8fe/volumes" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.138623 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" event={"ID":"0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86","Type":"ContainerDied","Data":"4f3a40e576543aaf4df935ddf9f2a5bfbbb7c7adf53a21bb8db38a44a5ccbd21"} Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.138680 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f3a40e576543aaf4df935ddf9f2a5bfbbb7c7adf53a21bb8db38a44a5ccbd21" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.138686 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qbsgd" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222121 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k"] Jan 30 22:04:30 crc kubenswrapper[4721]: E0130 22:04:30.222669 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222689 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 30 22:04:30 crc kubenswrapper[4721]: E0130 22:04:30.222726 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="extract-utilities" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222735 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="extract-utilities" Jan 30 22:04:30 crc kubenswrapper[4721]: E0130 22:04:30.222743 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="registry-server" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222749 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="registry-server" Jan 30 22:04:30 crc kubenswrapper[4721]: E0130 22:04:30.222761 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="extract-content" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222768 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="extract-content" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222974 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.222987 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb235842-6e9d-425f-a416-c6bf490cf8fe" containerName="registry-server" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.223750 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.227696 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.227714 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.227983 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.228007 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.228128 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.228137 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.237311 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k"] Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.392534 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.392888 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8br4s\" (UniqueName: \"kubernetes.io/projected/433d1a2c-a03e-483a-9dba-2adde950cf1f-kube-api-access-8br4s\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.392981 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.393063 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.393133 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.393229 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.495664 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.496049 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8br4s\" (UniqueName: \"kubernetes.io/projected/433d1a2c-a03e-483a-9dba-2adde950cf1f-kube-api-access-8br4s\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.496074 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.496105 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.496130 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.496171 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.501034 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.502720 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.503530 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.503790 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.505531 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.514208 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8br4s\" (UniqueName: \"kubernetes.io/projected/433d1a2c-a03e-483a-9dba-2adde950cf1f-kube-api-access-8br4s\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:30 crc kubenswrapper[4721]: I0130 22:04:30.540315 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:04:31 crc kubenswrapper[4721]: I0130 22:04:31.097750 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k"] Jan 30 22:04:31 crc kubenswrapper[4721]: I0130 22:04:31.171091 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" event={"ID":"433d1a2c-a03e-483a-9dba-2adde950cf1f","Type":"ContainerStarted","Data":"26d84d438195720b1f85f29299c7ba61ebdf962de627cde491f0c2d85b2dda52"} Jan 30 22:04:32 crc kubenswrapper[4721]: I0130 22:04:32.183760 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" event={"ID":"433d1a2c-a03e-483a-9dba-2adde950cf1f","Type":"ContainerStarted","Data":"4a5320ac6e39c868ea64ce21bca4ca04401d72c62b7a07aad37a315f4c601ed0"} Jan 30 22:04:32 crc kubenswrapper[4721]: I0130 22:04:32.217825 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" podStartSLOduration=1.7971957779999999 podStartE2EDuration="2.217809632s" podCreationTimestamp="2026-01-30 22:04:30 +0000 UTC" firstStartedPulling="2026-01-30 22:04:31.124863734 +0000 UTC m=+2859.916764980" lastFinishedPulling="2026-01-30 22:04:31.545477588 +0000 UTC m=+2860.337378834" observedRunningTime="2026-01-30 22:04:32.209009567 +0000 UTC m=+2861.000910813" watchObservedRunningTime="2026-01-30 22:04:32.217809632 +0000 UTC m=+2861.009710878" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.437619 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n9ntc"] Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.441010 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.472259 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n9ntc"] Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.577876 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nthwq\" (UniqueName: \"kubernetes.io/projected/5159a1c7-3608-4300-941d-704879490fc4-kube-api-access-nthwq\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.577977 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-catalog-content\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.578113 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-utilities\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.679721 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-utilities\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.679873 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nthwq\" (UniqueName: \"kubernetes.io/projected/5159a1c7-3608-4300-941d-704879490fc4-kube-api-access-nthwq\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.679911 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-catalog-content\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.680233 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-utilities\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.680372 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-catalog-content\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.707076 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nthwq\" (UniqueName: \"kubernetes.io/projected/5159a1c7-3608-4300-941d-704879490fc4-kube-api-access-nthwq\") pod \"redhat-operators-n9ntc\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:56 crc kubenswrapper[4721]: I0130 22:04:56.769810 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:04:57 crc kubenswrapper[4721]: I0130 22:04:57.279729 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n9ntc"] Jan 30 22:04:57 crc kubenswrapper[4721]: I0130 22:04:57.425645 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerStarted","Data":"d31d56dc74abba80b6b61e5b9f45747015a3dcd4a14407f71ff0d5e896d5ce1a"} Jan 30 22:04:58 crc kubenswrapper[4721]: I0130 22:04:58.437103 4721 generic.go:334] "Generic (PLEG): container finished" podID="5159a1c7-3608-4300-941d-704879490fc4" containerID="88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1" exitCode=0 Jan 30 22:04:58 crc kubenswrapper[4721]: I0130 22:04:58.437198 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerDied","Data":"88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1"} Jan 30 22:04:59 crc kubenswrapper[4721]: I0130 22:04:59.449635 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:04:59 crc kubenswrapper[4721]: I0130 22:04:59.450022 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:05:00 crc kubenswrapper[4721]: I0130 22:05:00.470598 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerStarted","Data":"90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1"} Jan 30 22:05:05 crc kubenswrapper[4721]: I0130 22:05:05.522012 4721 generic.go:334] "Generic (PLEG): container finished" podID="5159a1c7-3608-4300-941d-704879490fc4" containerID="90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1" exitCode=0 Jan 30 22:05:05 crc kubenswrapper[4721]: I0130 22:05:05.522066 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerDied","Data":"90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1"} Jan 30 22:05:06 crc kubenswrapper[4721]: I0130 22:05:06.534797 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerStarted","Data":"734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b"} Jan 30 22:05:06 crc kubenswrapper[4721]: I0130 22:05:06.559876 4721 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n9ntc" podStartSLOduration=2.800797742 podStartE2EDuration="10.559859548s" podCreationTimestamp="2026-01-30 22:04:56 +0000 UTC" firstStartedPulling="2026-01-30 22:04:58.4414335 +0000 UTC m=+2887.233334746" lastFinishedPulling="2026-01-30 22:05:06.200495306 +0000 UTC m=+2894.992396552" observedRunningTime="2026-01-30 22:05:06.553567301 +0000 UTC m=+2895.345468547" watchObservedRunningTime="2026-01-30 22:05:06.559859548 +0000 UTC m=+2895.351760794" Jan 30 22:05:06 crc kubenswrapper[4721]: I0130 22:05:06.770908 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:05:06 crc kubenswrapper[4721]: I0130 22:05:06.770963 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:05:07 crc kubenswrapper[4721]: I0130 22:05:07.816494 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n9ntc" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="registry-server" probeResult="failure" output=< Jan 30 22:05:07 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 22:05:07 crc kubenswrapper[4721]: > Jan 30 22:05:13 crc kubenswrapper[4721]: E0130 22:05:13.220406 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod433d1a2c_a03e_483a_9dba_2adde950cf1f.slice/crio-conmon-4a5320ac6e39c868ea64ce21bca4ca04401d72c62b7a07aad37a315f4c601ed0.scope\": RecentStats: unable to find data in memory cache]" Jan 30 22:05:13 crc kubenswrapper[4721]: I0130 22:05:13.601336 4721 generic.go:334] "Generic (PLEG): container finished" podID="433d1a2c-a03e-483a-9dba-2adde950cf1f" containerID="4a5320ac6e39c868ea64ce21bca4ca04401d72c62b7a07aad37a315f4c601ed0" exitCode=0 Jan 30 22:05:13 crc kubenswrapper[4721]: I0130 22:05:13.601393 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" event={"ID":"433d1a2c-a03e-483a-9dba-2adde950cf1f","Type":"ContainerDied","Data":"4a5320ac6e39c868ea64ce21bca4ca04401d72c62b7a07aad37a315f4c601ed0"} Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.152506 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.300653 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"433d1a2c-a03e-483a-9dba-2adde950cf1f\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.300930 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-inventory\") pod \"433d1a2c-a03e-483a-9dba-2adde950cf1f\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.300989 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-metadata-combined-ca-bundle\") pod \"433d1a2c-a03e-483a-9dba-2adde950cf1f\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.301027 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-ssh-key-openstack-edpm-ipam\") pod \"433d1a2c-a03e-483a-9dba-2adde950cf1f\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.301064 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8br4s\" (UniqueName: \"kubernetes.io/projected/433d1a2c-a03e-483a-9dba-2adde950cf1f-kube-api-access-8br4s\") pod \"433d1a2c-a03e-483a-9dba-2adde950cf1f\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.301196 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-nova-metadata-neutron-config-0\") pod \"433d1a2c-a03e-483a-9dba-2adde950cf1f\" (UID: \"433d1a2c-a03e-483a-9dba-2adde950cf1f\") " Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.308665 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/433d1a2c-a03e-483a-9dba-2adde950cf1f-kube-api-access-8br4s" (OuterVolumeSpecName: "kube-api-access-8br4s") pod "433d1a2c-a03e-483a-9dba-2adde950cf1f" (UID: "433d1a2c-a03e-483a-9dba-2adde950cf1f"). InnerVolumeSpecName "kube-api-access-8br4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.309492 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "433d1a2c-a03e-483a-9dba-2adde950cf1f" (UID: "433d1a2c-a03e-483a-9dba-2adde950cf1f"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.334537 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "433d1a2c-a03e-483a-9dba-2adde950cf1f" (UID: "433d1a2c-a03e-483a-9dba-2adde950cf1f"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.340471 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "433d1a2c-a03e-483a-9dba-2adde950cf1f" (UID: "433d1a2c-a03e-483a-9dba-2adde950cf1f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.340534 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "433d1a2c-a03e-483a-9dba-2adde950cf1f" (UID: "433d1a2c-a03e-483a-9dba-2adde950cf1f"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.362205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-inventory" (OuterVolumeSpecName: "inventory") pod "433d1a2c-a03e-483a-9dba-2adde950cf1f" (UID: "433d1a2c-a03e-483a-9dba-2adde950cf1f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.403865 4721 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.403908 4721 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.403921 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.403932 4721 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.403942 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/433d1a2c-a03e-483a-9dba-2adde950cf1f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.403951 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8br4s\" (UniqueName: \"kubernetes.io/projected/433d1a2c-a03e-483a-9dba-2adde950cf1f-kube-api-access-8br4s\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.622623 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" event={"ID":"433d1a2c-a03e-483a-9dba-2adde950cf1f","Type":"ContainerDied","Data":"26d84d438195720b1f85f29299c7ba61ebdf962de627cde491f0c2d85b2dda52"} Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.622677 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26d84d438195720b1f85f29299c7ba61ebdf962de627cde491f0c2d85b2dda52" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.623028 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.710344 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4"] Jan 30 22:05:15 crc kubenswrapper[4721]: E0130 22:05:15.710886 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="433d1a2c-a03e-483a-9dba-2adde950cf1f" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.710910 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="433d1a2c-a03e-483a-9dba-2adde950cf1f" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.711127 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="433d1a2c-a03e-483a-9dba-2adde950cf1f" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.711929 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.715035 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.715104 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.715416 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.715550 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.720862 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.727618 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4"] Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.813037 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.813208 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.813278 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") 
" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.813434 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85db6\" (UniqueName: \"kubernetes.io/projected/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-kube-api-access-85db6\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.813529 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.915873 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85db6\" (UniqueName: \"kubernetes.io/projected/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-kube-api-access-85db6\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.915958 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.916063 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.916142 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.916163 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.923175 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: 
\"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.923175 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.923214 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.924513 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:15 crc kubenswrapper[4721]: I0130 22:05:15.935031 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85db6\" (UniqueName: \"kubernetes.io/projected/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-kube-api-access-85db6\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:16 crc kubenswrapper[4721]: I0130 22:05:16.049755 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:05:16 crc kubenswrapper[4721]: I0130 22:05:16.628471 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4"] Jan 30 22:05:16 crc kubenswrapper[4721]: W0130 22:05:16.653593 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75dd1a6a_3fe9_4016_bdb1_bbc9ec572417.slice/crio-ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360 WatchSource:0}: Error finding container ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360: Status 404 returned error can't find the container with id ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360 Jan 30 22:05:16 crc kubenswrapper[4721]: I0130 22:05:16.841229 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:05:16 crc kubenswrapper[4721]: I0130 22:05:16.895054 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:05:17 crc kubenswrapper[4721]: I0130 22:05:17.085491 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n9ntc"] Jan 30 22:05:17 crc kubenswrapper[4721]: I0130 22:05:17.646579 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" event={"ID":"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417","Type":"ContainerStarted","Data":"b65943ed83bc4b8ad8fdef320ed37fe519ef288e69be8ba42f48cb262752b2c2"} Jan 30 22:05:17 crc kubenswrapper[4721]: I0130 22:05:17.646632 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" event={"ID":"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417","Type":"ContainerStarted","Data":"ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360"} Jan 30 22:05:17 crc kubenswrapper[4721]: I0130 22:05:17.676818 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" podStartSLOduration=2.272302693 podStartE2EDuration="2.676792902s" podCreationTimestamp="2026-01-30 22:05:15 +0000 UTC" firstStartedPulling="2026-01-30 22:05:16.660745607 +0000 UTC m=+2905.452646853" lastFinishedPulling="2026-01-30 22:05:17.065235816 +0000 UTC m=+2905.857137062" observedRunningTime="2026-01-30 22:05:17.664523209 +0000 UTC m=+2906.456424455" watchObservedRunningTime="2026-01-30 22:05:17.676792902 +0000 UTC m=+2906.468694148" Jan 30 22:05:18 crc kubenswrapper[4721]: I0130 22:05:18.656873 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n9ntc" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="registry-server" containerID="cri-o://734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b" gracePeriod=2 Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.282517 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.395885 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nthwq\" (UniqueName: \"kubernetes.io/projected/5159a1c7-3608-4300-941d-704879490fc4-kube-api-access-nthwq\") pod \"5159a1c7-3608-4300-941d-704879490fc4\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.396107 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-catalog-content\") pod \"5159a1c7-3608-4300-941d-704879490fc4\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.396216 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-utilities\") pod \"5159a1c7-3608-4300-941d-704879490fc4\" (UID: \"5159a1c7-3608-4300-941d-704879490fc4\") " Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.397521 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-utilities" (OuterVolumeSpecName: "utilities") pod "5159a1c7-3608-4300-941d-704879490fc4" (UID: "5159a1c7-3608-4300-941d-704879490fc4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.405269 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5159a1c7-3608-4300-941d-704879490fc4-kube-api-access-nthwq" (OuterVolumeSpecName: "kube-api-access-nthwq") pod "5159a1c7-3608-4300-941d-704879490fc4" (UID: "5159a1c7-3608-4300-941d-704879490fc4"). InnerVolumeSpecName "kube-api-access-nthwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.498436 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nthwq\" (UniqueName: \"kubernetes.io/projected/5159a1c7-3608-4300-941d-704879490fc4-kube-api-access-nthwq\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.498731 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.532402 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5159a1c7-3608-4300-941d-704879490fc4" (UID: "5159a1c7-3608-4300-941d-704879490fc4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.600793 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5159a1c7-3608-4300-941d-704879490fc4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.669397 4721 generic.go:334] "Generic (PLEG): container finished" podID="5159a1c7-3608-4300-941d-704879490fc4" containerID="734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b" exitCode=0 Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.669452 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerDied","Data":"734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b"} Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.669491 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9ntc" event={"ID":"5159a1c7-3608-4300-941d-704879490fc4","Type":"ContainerDied","Data":"d31d56dc74abba80b6b61e5b9f45747015a3dcd4a14407f71ff0d5e896d5ce1a"} Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.669492 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n9ntc" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.669515 4721 scope.go:117] "RemoveContainer" containerID="734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.704893 4721 scope.go:117] "RemoveContainer" containerID="90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.707611 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n9ntc"] Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.717984 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n9ntc"] Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.733650 4721 scope.go:117] "RemoveContainer" containerID="88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.782692 4721 scope.go:117] "RemoveContainer" containerID="734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b" Jan 30 22:05:19 crc kubenswrapper[4721]: E0130 22:05:19.783359 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b\": container with ID starting with 734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b not found: ID does not exist" containerID="734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.783394 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b"} err="failed to get container status \"734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b\": rpc error: code = NotFound desc = could not find container \"734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b\": container with ID starting with 734d35cfb49b4648fa35d8bc99eb95a98d33ac1a74f4709c8c295e39c8419d2b not found: ID does not exist" Jan 30 22:05:19 crc 
kubenswrapper[4721]: I0130 22:05:19.783428 4721 scope.go:117] "RemoveContainer" containerID="90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1" Jan 30 22:05:19 crc kubenswrapper[4721]: E0130 22:05:19.783798 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1\": container with ID starting with 90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1 not found: ID does not exist" containerID="90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.783827 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1"} err="failed to get container status \"90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1\": rpc error: code = NotFound desc = could not find container \"90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1\": container with ID starting with 90dc217648784a08be96419af4bf86828b36b058ad382476e78bbf6fa75b54b1 not found: ID does not exist" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.783844 4721 scope.go:117] "RemoveContainer" containerID="88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1" Jan 30 22:05:19 crc kubenswrapper[4721]: E0130 22:05:19.784139 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1\": container with ID starting with 88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1 not found: ID does not exist" containerID="88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1" Jan 30 22:05:19 crc kubenswrapper[4721]: I0130 22:05:19.784161 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1"} err="failed to get container status \"88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1\": rpc error: code = NotFound desc = could not find container \"88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1\": container with ID starting with 88199e289bf39c898c57b245bd0497e77045840685bf1765dc9080d8b0ff6bc1 not found: ID does not exist" Jan 30 22:05:20 crc kubenswrapper[4721]: I0130 22:05:20.127369 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5159a1c7-3608-4300-941d-704879490fc4" path="/var/lib/kubelet/pods/5159a1c7-3608-4300-941d-704879490fc4/volumes" Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.448363 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.448993 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.449043 4721 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.449885 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.449939 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" gracePeriod=600 Jan 30 22:05:29 crc kubenswrapper[4721]: E0130 22:05:29.638319 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.766078 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" exitCode=0 Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.766149 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d"} Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.766197 4721 scope.go:117] "RemoveContainer" containerID="bf92663f77af193c5a48bd2b97d08d0766d0048fc46d63b788adab121454a826" Jan 30 22:05:29 crc kubenswrapper[4721]: I0130 22:05:29.767205 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:05:29 crc kubenswrapper[4721]: E0130 22:05:29.767700 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:05:41 crc kubenswrapper[4721]: I0130 22:05:41.092138 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:05:41 crc kubenswrapper[4721]: E0130 22:05:41.093144 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:05:52 
crc kubenswrapper[4721]: I0130 22:05:52.098499 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:05:52 crc kubenswrapper[4721]: E0130 22:05:52.099406 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:06:05 crc kubenswrapper[4721]: I0130 22:06:05.092196 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:06:05 crc kubenswrapper[4721]: E0130 22:06:05.092989 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:06:16 crc kubenswrapper[4721]: I0130 22:06:16.092504 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:06:16 crc kubenswrapper[4721]: E0130 22:06:16.093518 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:06:28 crc kubenswrapper[4721]: I0130 22:06:28.092494 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:06:28 crc kubenswrapper[4721]: E0130 22:06:28.093335 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:06:42 crc kubenswrapper[4721]: I0130 22:06:42.100423 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:06:42 crc kubenswrapper[4721]: E0130 22:06:42.101507 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:06:56 crc kubenswrapper[4721]: I0130 22:06:56.092999 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:06:56 crc 
kubenswrapper[4721]: E0130 22:06:56.093991 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:07:11 crc kubenswrapper[4721]: I0130 22:07:11.092574 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:07:11 crc kubenswrapper[4721]: E0130 22:07:11.093957 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:07:22 crc kubenswrapper[4721]: I0130 22:07:22.099896 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:07:22 crc kubenswrapper[4721]: E0130 22:07:22.100869 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:07:34 crc kubenswrapper[4721]: I0130 22:07:34.092881 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:07:34 crc kubenswrapper[4721]: E0130 22:07:34.094082 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:07:46 crc kubenswrapper[4721]: I0130 22:07:46.093585 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:07:46 crc kubenswrapper[4721]: E0130 22:07:46.094453 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:08:00 crc kubenswrapper[4721]: I0130 22:08:00.092370 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:08:00 crc kubenswrapper[4721]: E0130 22:08:00.093089 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:08:15 crc kubenswrapper[4721]: I0130 22:08:15.092460 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:08:15 crc kubenswrapper[4721]: E0130 22:08:15.093323 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.606120 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4f4jt"] Jan 30 22:08:21 crc kubenswrapper[4721]: E0130 22:08:21.607345 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="extract-content" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.607362 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="extract-content" Jan 30 22:08:21 crc kubenswrapper[4721]: E0130 22:08:21.607425 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="extract-utilities" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.607433 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="extract-utilities" Jan 30 22:08:21 crc kubenswrapper[4721]: E0130 22:08:21.607441 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="registry-server" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.607448 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="registry-server" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.607747 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5159a1c7-3608-4300-941d-704879490fc4" containerName="registry-server" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.617172 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4f4jt"] Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.617330 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.754728 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-catalog-content\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.754823 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-utilities\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.756786 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrlnz\" (UniqueName: \"kubernetes.io/projected/867e368a-f473-4418-b030-2007580c1ba5-kube-api-access-qrlnz\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.858838 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-catalog-content\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.858914 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-utilities\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.858994 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrlnz\" (UniqueName: \"kubernetes.io/projected/867e368a-f473-4418-b030-2007580c1ba5-kube-api-access-qrlnz\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.859358 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-catalog-content\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.859746 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-utilities\") pod \"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.892329 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrlnz\" (UniqueName: \"kubernetes.io/projected/867e368a-f473-4418-b030-2007580c1ba5-kube-api-access-qrlnz\") pod 
\"community-operators-4f4jt\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:21 crc kubenswrapper[4721]: I0130 22:08:21.950618 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:22 crc kubenswrapper[4721]: I0130 22:08:22.584391 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4f4jt"] Jan 30 22:08:23 crc kubenswrapper[4721]: I0130 22:08:23.462751 4721 generic.go:334] "Generic (PLEG): container finished" podID="867e368a-f473-4418-b030-2007580c1ba5" containerID="98532821162a0451209558379f3f92ceb6c18d2b3d96018e376468568e47e4e3" exitCode=0 Jan 30 22:08:23 crc kubenswrapper[4721]: I0130 22:08:23.463091 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerDied","Data":"98532821162a0451209558379f3f92ceb6c18d2b3d96018e376468568e47e4e3"} Jan 30 22:08:23 crc kubenswrapper[4721]: I0130 22:08:23.463128 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerStarted","Data":"417a1889149ca2e7d1015e0b34b5014ab6201982db3355d252127db6793004f9"} Jan 30 22:08:24 crc kubenswrapper[4721]: I0130 22:08:24.887630 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tksbt"] Jan 30 22:08:24 crc kubenswrapper[4721]: I0130 22:08:24.890373 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:24 crc kubenswrapper[4721]: I0130 22:08:24.913062 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tksbt"] Jan 30 22:08:24 crc kubenswrapper[4721]: I0130 22:08:24.935120 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-catalog-content\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:24 crc kubenswrapper[4721]: I0130 22:08:24.935241 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-utilities\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:24 crc kubenswrapper[4721]: I0130 22:08:24.935265 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b7tq\" (UniqueName: \"kubernetes.io/projected/269d43c5-9489-442f-a221-d0d75c96fc3c-kube-api-access-5b7tq\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.037559 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-utilities\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " 
pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.037613 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b7tq\" (UniqueName: \"kubernetes.io/projected/269d43c5-9489-442f-a221-d0d75c96fc3c-kube-api-access-5b7tq\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.037744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-catalog-content\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.038150 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-utilities\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.038241 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-catalog-content\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.062725 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b7tq\" (UniqueName: \"kubernetes.io/projected/269d43c5-9489-442f-a221-d0d75c96fc3c-kube-api-access-5b7tq\") pod \"redhat-marketplace-tksbt\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.226106 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.497343 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerStarted","Data":"dc8f65d3e2ae5233fe57752a8c8bbcad6104048fca3a54e800ca2907c4c2bc06"} Jan 30 22:08:25 crc kubenswrapper[4721]: I0130 22:08:25.792498 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tksbt"] Jan 30 22:08:25 crc kubenswrapper[4721]: W0130 22:08:25.798587 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod269d43c5_9489_442f_a221_d0d75c96fc3c.slice/crio-1fdd387f107a8b69664494e7bd1f8336198a68ba4bbc5a1528cd726a9a10475f WatchSource:0}: Error finding container 1fdd387f107a8b69664494e7bd1f8336198a68ba4bbc5a1528cd726a9a10475f: Status 404 returned error can't find the container with id 1fdd387f107a8b69664494e7bd1f8336198a68ba4bbc5a1528cd726a9a10475f Jan 30 22:08:26 crc kubenswrapper[4721]: I0130 22:08:26.510285 4721 generic.go:334] "Generic (PLEG): container finished" podID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerID="bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50" exitCode=0 Jan 30 22:08:26 crc kubenswrapper[4721]: I0130 22:08:26.510364 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerDied","Data":"bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50"} Jan 30 22:08:26 crc kubenswrapper[4721]: I0130 22:08:26.510827 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerStarted","Data":"1fdd387f107a8b69664494e7bd1f8336198a68ba4bbc5a1528cd726a9a10475f"} Jan 30 22:08:27 crc kubenswrapper[4721]: I0130 22:08:27.522726 4721 generic.go:334] "Generic (PLEG): container finished" podID="867e368a-f473-4418-b030-2007580c1ba5" containerID="dc8f65d3e2ae5233fe57752a8c8bbcad6104048fca3a54e800ca2907c4c2bc06" exitCode=0 Jan 30 22:08:27 crc kubenswrapper[4721]: I0130 22:08:27.522815 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerDied","Data":"dc8f65d3e2ae5233fe57752a8c8bbcad6104048fca3a54e800ca2907c4c2bc06"} Jan 30 22:08:28 crc kubenswrapper[4721]: I0130 22:08:28.092221 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:08:28 crc kubenswrapper[4721]: E0130 22:08:28.092713 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:08:28 crc kubenswrapper[4721]: I0130 22:08:28.564947 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" 
event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerStarted","Data":"15ac1d68f42ab989bc348eabf3c6c04b0c85d7763d70df2d6cc58657abd8d341"} Jan 30 22:08:28 crc kubenswrapper[4721]: I0130 22:08:28.585004 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerStarted","Data":"ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613"} Jan 30 22:08:28 crc kubenswrapper[4721]: I0130 22:08:28.612724 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4f4jt" podStartSLOduration=3.058184654 podStartE2EDuration="7.612699169s" podCreationTimestamp="2026-01-30 22:08:21 +0000 UTC" firstStartedPulling="2026-01-30 22:08:23.470840455 +0000 UTC m=+3092.262741701" lastFinishedPulling="2026-01-30 22:08:28.02535497 +0000 UTC m=+3096.817256216" observedRunningTime="2026-01-30 22:08:28.602731487 +0000 UTC m=+3097.394632723" watchObservedRunningTime="2026-01-30 22:08:28.612699169 +0000 UTC m=+3097.404600425" Jan 30 22:08:29 crc kubenswrapper[4721]: I0130 22:08:29.602281 4721 generic.go:334] "Generic (PLEG): container finished" podID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerID="ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613" exitCode=0 Jan 30 22:08:29 crc kubenswrapper[4721]: I0130 22:08:29.602436 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerDied","Data":"ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613"} Jan 30 22:08:30 crc kubenswrapper[4721]: I0130 22:08:30.617077 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerStarted","Data":"ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0"} Jan 30 22:08:30 crc kubenswrapper[4721]: I0130 22:08:30.643496 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tksbt" podStartSLOduration=3.72723639 podStartE2EDuration="6.643474119s" podCreationTimestamp="2026-01-30 22:08:24 +0000 UTC" firstStartedPulling="2026-01-30 22:08:26.513269755 +0000 UTC m=+3095.305171001" lastFinishedPulling="2026-01-30 22:08:29.429507484 +0000 UTC m=+3098.221408730" observedRunningTime="2026-01-30 22:08:30.641396645 +0000 UTC m=+3099.433297891" watchObservedRunningTime="2026-01-30 22:08:30.643474119 +0000 UTC m=+3099.435375365" Jan 30 22:08:31 crc kubenswrapper[4721]: I0130 22:08:31.951155 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:31 crc kubenswrapper[4721]: I0130 22:08:31.951229 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:32 crc kubenswrapper[4721]: I0130 22:08:32.010027 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:35 crc kubenswrapper[4721]: I0130 22:08:35.226667 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:35 crc kubenswrapper[4721]: I0130 22:08:35.227267 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:35 crc kubenswrapper[4721]: I0130 22:08:35.283688 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:35 crc kubenswrapper[4721]: I0130 22:08:35.711555 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:38 crc kubenswrapper[4721]: I0130 22:08:38.770737 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tksbt"] Jan 30 22:08:38 crc kubenswrapper[4721]: I0130 22:08:38.771370 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tksbt" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="registry-server" containerID="cri-o://ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0" gracePeriod=2 Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.332153 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.382655 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b7tq\" (UniqueName: \"kubernetes.io/projected/269d43c5-9489-442f-a221-d0d75c96fc3c-kube-api-access-5b7tq\") pod \"269d43c5-9489-442f-a221-d0d75c96fc3c\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.382793 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-utilities\") pod \"269d43c5-9489-442f-a221-d0d75c96fc3c\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.383387 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-catalog-content\") pod \"269d43c5-9489-442f-a221-d0d75c96fc3c\" (UID: \"269d43c5-9489-442f-a221-d0d75c96fc3c\") " Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.384043 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-utilities" (OuterVolumeSpecName: "utilities") pod "269d43c5-9489-442f-a221-d0d75c96fc3c" (UID: "269d43c5-9489-442f-a221-d0d75c96fc3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.392889 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/269d43c5-9489-442f-a221-d0d75c96fc3c-kube-api-access-5b7tq" (OuterVolumeSpecName: "kube-api-access-5b7tq") pod "269d43c5-9489-442f-a221-d0d75c96fc3c" (UID: "269d43c5-9489-442f-a221-d0d75c96fc3c"). InnerVolumeSpecName "kube-api-access-5b7tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.413139 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "269d43c5-9489-442f-a221-d0d75c96fc3c" (UID: "269d43c5-9489-442f-a221-d0d75c96fc3c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.487570 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.487639 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b7tq\" (UniqueName: \"kubernetes.io/projected/269d43c5-9489-442f-a221-d0d75c96fc3c-kube-api-access-5b7tq\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.487659 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269d43c5-9489-442f-a221-d0d75c96fc3c-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.719071 4721 generic.go:334] "Generic (PLEG): container finished" podID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerID="ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0" exitCode=0 Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.719119 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerDied","Data":"ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0"} Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.719159 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tksbt" event={"ID":"269d43c5-9489-442f-a221-d0d75c96fc3c","Type":"ContainerDied","Data":"1fdd387f107a8b69664494e7bd1f8336198a68ba4bbc5a1528cd726a9a10475f"} Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.719162 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tksbt" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.719181 4721 scope.go:117] "RemoveContainer" containerID="ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.741936 4721 scope.go:117] "RemoveContainer" containerID="ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.757505 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tksbt"] Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.769358 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tksbt"] Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.779239 4721 scope.go:117] "RemoveContainer" containerID="bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.818521 4721 scope.go:117] "RemoveContainer" containerID="ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0" Jan 30 22:08:39 crc kubenswrapper[4721]: E0130 22:08:39.818988 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0\": container with ID starting with ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0 not found: ID does not exist" containerID="ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.819034 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0"} err="failed to get container status \"ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0\": rpc error: code = NotFound desc = could not find container \"ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0\": container with ID starting with ca7bfcb3f97f8daf496cb2b54bf2c3464edaca7dc2f87d884a28f4c554f742b0 not found: ID does not exist" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.819064 4721 scope.go:117] "RemoveContainer" containerID="ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613" Jan 30 22:08:39 crc kubenswrapper[4721]: E0130 22:08:39.819516 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613\": container with ID starting with ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613 not found: ID does not exist" containerID="ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.819557 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613"} err="failed to get container status \"ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613\": rpc error: code = NotFound desc = could not find container \"ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613\": container with ID starting with ee790cb5b4e1d8cc788ab78e1dc7a33e6da484ad77dcac03dd6afd2d80888613 not found: ID does not exist" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.819583 4721 scope.go:117] "RemoveContainer" 
containerID="bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50" Jan 30 22:08:39 crc kubenswrapper[4721]: E0130 22:08:39.819914 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50\": container with ID starting with bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50 not found: ID does not exist" containerID="bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50" Jan 30 22:08:39 crc kubenswrapper[4721]: I0130 22:08:39.819933 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50"} err="failed to get container status \"bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50\": rpc error: code = NotFound desc = could not find container \"bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50\": container with ID starting with bdcda2ccc14ab724c43d41ed3ada7d85cc15babbe9e2ad26c6b99f5ce6d67b50 not found: ID does not exist" Jan 30 22:08:40 crc kubenswrapper[4721]: I0130 22:08:40.092750 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:08:40 crc kubenswrapper[4721]: E0130 22:08:40.093632 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:08:40 crc kubenswrapper[4721]: I0130 22:08:40.104158 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" path="/var/lib/kubelet/pods/269d43c5-9489-442f-a221-d0d75c96fc3c/volumes" Jan 30 22:08:42 crc kubenswrapper[4721]: I0130 22:08:42.015371 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:46 crc kubenswrapper[4721]: I0130 22:08:46.569988 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4f4jt"] Jan 30 22:08:46 crc kubenswrapper[4721]: I0130 22:08:46.570535 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4f4jt" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="registry-server" containerID="cri-o://15ac1d68f42ab989bc348eabf3c6c04b0c85d7763d70df2d6cc58657abd8d341" gracePeriod=2 Jan 30 22:08:46 crc kubenswrapper[4721]: I0130 22:08:46.807601 4721 generic.go:334] "Generic (PLEG): container finished" podID="867e368a-f473-4418-b030-2007580c1ba5" containerID="15ac1d68f42ab989bc348eabf3c6c04b0c85d7763d70df2d6cc58657abd8d341" exitCode=0 Jan 30 22:08:46 crc kubenswrapper[4721]: I0130 22:08:46.808083 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerDied","Data":"15ac1d68f42ab989bc348eabf3c6c04b0c85d7763d70df2d6cc58657abd8d341"} Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.171695 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.189557 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-utilities\") pod \"867e368a-f473-4418-b030-2007580c1ba5\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.189639 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-catalog-content\") pod \"867e368a-f473-4418-b030-2007580c1ba5\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.189719 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrlnz\" (UniqueName: \"kubernetes.io/projected/867e368a-f473-4418-b030-2007580c1ba5-kube-api-access-qrlnz\") pod \"867e368a-f473-4418-b030-2007580c1ba5\" (UID: \"867e368a-f473-4418-b030-2007580c1ba5\") " Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.190772 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-utilities" (OuterVolumeSpecName: "utilities") pod "867e368a-f473-4418-b030-2007580c1ba5" (UID: "867e368a-f473-4418-b030-2007580c1ba5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.196822 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/867e368a-f473-4418-b030-2007580c1ba5-kube-api-access-qrlnz" (OuterVolumeSpecName: "kube-api-access-qrlnz") pod "867e368a-f473-4418-b030-2007580c1ba5" (UID: "867e368a-f473-4418-b030-2007580c1ba5"). InnerVolumeSpecName "kube-api-access-qrlnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.259368 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "867e368a-f473-4418-b030-2007580c1ba5" (UID: "867e368a-f473-4418-b030-2007580c1ba5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.292681 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.292737 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/867e368a-f473-4418-b030-2007580c1ba5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.292754 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrlnz\" (UniqueName: \"kubernetes.io/projected/867e368a-f473-4418-b030-2007580c1ba5-kube-api-access-qrlnz\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.822901 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4f4jt" event={"ID":"867e368a-f473-4418-b030-2007580c1ba5","Type":"ContainerDied","Data":"417a1889149ca2e7d1015e0b34b5014ab6201982db3355d252127db6793004f9"} Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.822949 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4f4jt" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.822972 4721 scope.go:117] "RemoveContainer" containerID="15ac1d68f42ab989bc348eabf3c6c04b0c85d7763d70df2d6cc58657abd8d341" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.846787 4721 scope.go:117] "RemoveContainer" containerID="dc8f65d3e2ae5233fe57752a8c8bbcad6104048fca3a54e800ca2907c4c2bc06" Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.870403 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4f4jt"] Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.885848 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4f4jt"] Jan 30 22:08:47 crc kubenswrapper[4721]: I0130 22:08:47.894966 4721 scope.go:117] "RemoveContainer" containerID="98532821162a0451209558379f3f92ceb6c18d2b3d96018e376468568e47e4e3" Jan 30 22:08:48 crc kubenswrapper[4721]: I0130 22:08:48.105704 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="867e368a-f473-4418-b030-2007580c1ba5" path="/var/lib/kubelet/pods/867e368a-f473-4418-b030-2007580c1ba5/volumes" Jan 30 22:08:51 crc kubenswrapper[4721]: I0130 22:08:51.093315 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:08:51 crc kubenswrapper[4721]: E0130 22:08:51.094215 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:08:56 crc kubenswrapper[4721]: I0130 22:08:56.909921 4721 generic.go:334] "Generic (PLEG): container finished" podID="75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" containerID="b65943ed83bc4b8ad8fdef320ed37fe519ef288e69be8ba42f48cb262752b2c2" exitCode=0 Jan 30 22:08:56 crc kubenswrapper[4721]: I0130 22:08:56.910028 4721 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" event={"ID":"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417","Type":"ContainerDied","Data":"b65943ed83bc4b8ad8fdef320ed37fe519ef288e69be8ba42f48cb262752b2c2"} Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.492916 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.571093 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-combined-ca-bundle\") pod \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.571184 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-secret-0\") pod \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.572014 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-ssh-key-openstack-edpm-ipam\") pod \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.572218 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85db6\" (UniqueName: \"kubernetes.io/projected/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-kube-api-access-85db6\") pod \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.572368 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-inventory\") pod \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\" (UID: \"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417\") " Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.578784 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-kube-api-access-85db6" (OuterVolumeSpecName: "kube-api-access-85db6") pod "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" (UID: "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417"). InnerVolumeSpecName "kube-api-access-85db6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.579573 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" (UID: "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.607968 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" (UID: "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.608206 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-inventory" (OuterVolumeSpecName: "inventory") pod "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" (UID: "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.612896 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" (UID: "75dd1a6a-3fe9-4016-bdb1-bbc9ec572417"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.675843 4721 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.675893 4721 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.675907 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.675919 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85db6\" (UniqueName: \"kubernetes.io/projected/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-kube-api-access-85db6\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.675930 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75dd1a6a-3fe9-4016-bdb1-bbc9ec572417-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.932106 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" event={"ID":"75dd1a6a-3fe9-4016-bdb1-bbc9ec572417","Type":"ContainerDied","Data":"ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360"} Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.932151 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360" Jan 30 22:08:58 crc kubenswrapper[4721]: I0130 22:08:58.932187 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.044372 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp"] Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045061 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="extract-content" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045084 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="extract-content" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045108 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="extract-utilities" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045117 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="extract-utilities" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045141 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045150 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045174 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="extract-utilities" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045182 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="extract-utilities" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045200 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="registry-server" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045208 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="registry-server" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045228 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="registry-server" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045236 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="registry-server" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.045252 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="extract-content" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045260 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="extract-content" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045578 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="75dd1a6a-3fe9-4016-bdb1-bbc9ec572417" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045601 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="269d43c5-9489-442f-a221-d0d75c96fc3c" containerName="registry-server" Jan 30 
22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.045635 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="867e368a-f473-4418-b030-2007580c1ba5" containerName="registry-server" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.046748 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.051590 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.051895 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.052070 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.052117 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.052173 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.052448 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.052603 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.064143 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp"] Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.084680 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.084770 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.084870 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.084944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.084973 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxlgj\" (UniqueName: \"kubernetes.io/projected/e148581e-1ed2-4532-a179-f1491d58dc0e-kube-api-access-xxlgj\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.085001 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.085044 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.085162 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.085198 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: E0130 22:08:59.161175 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75dd1a6a_3fe9_4016_bdb1_bbc9ec572417.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75dd1a6a_3fe9_4016_bdb1_bbc9ec572417.slice/crio-ad71eae243babe5ab300a72c77cac57b95f714ff4a27f7450a3a6bc7f323c360\": RecentStats: unable to find data in memory cache]" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188409 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188466 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188533 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188576 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188609 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188688 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188709 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxlgj\" (UniqueName: \"kubernetes.io/projected/e148581e-1ed2-4532-a179-f1491d58dc0e-kube-api-access-xxlgj\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188727 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.188926 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.190630 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.194259 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.193747 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.196605 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.196999 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.198283 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.198566 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.199023 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.207116 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxlgj\" (UniqueName: \"kubernetes.io/projected/e148581e-1ed2-4532-a179-f1491d58dc0e-kube-api-access-xxlgj\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-g4xdp\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:08:59 crc kubenswrapper[4721]: I0130 22:08:59.382635 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:09:00 crc kubenswrapper[4721]: I0130 22:09:00.011128 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 22:09:00 crc kubenswrapper[4721]: I0130 22:09:00.014753 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp"] Jan 30 22:09:00 crc kubenswrapper[4721]: I0130 22:09:00.952612 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" event={"ID":"e148581e-1ed2-4532-a179-f1491d58dc0e","Type":"ContainerStarted","Data":"bb8942dd75c08deca5d67a49383766a7622b4fb04a2ce2d9bde08e4521ad8b84"} Jan 30 22:09:00 crc kubenswrapper[4721]: I0130 22:09:00.952977 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" event={"ID":"e148581e-1ed2-4532-a179-f1491d58dc0e","Type":"ContainerStarted","Data":"16eae1a5ecf32284394eabe8bb9a619ea95bbfa38d5bda8b521289088e8fe108"} Jan 30 22:09:00 crc kubenswrapper[4721]: I0130 22:09:00.974533 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" podStartSLOduration=1.570701484 podStartE2EDuration="1.974514503s" podCreationTimestamp="2026-01-30 22:08:59 +0000 UTC" firstStartedPulling="2026-01-30 22:09:00.010883914 +0000 UTC m=+3128.802785160" lastFinishedPulling="2026-01-30 22:09:00.414696923 +0000 UTC m=+3129.206598179" observedRunningTime="2026-01-30 22:09:00.974372769 +0000 UTC m=+3129.766274025" watchObservedRunningTime="2026-01-30 22:09:00.974514503 +0000 UTC m=+3129.766415749" Jan 30 22:09:03 crc kubenswrapper[4721]: I0130 22:09:03.092108 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:09:03 crc kubenswrapper[4721]: E0130 22:09:03.092720 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:09:16 crc kubenswrapper[4721]: I0130 22:09:16.093233 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:09:16 crc kubenswrapper[4721]: E0130 22:09:16.094202 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:09:31 crc kubenswrapper[4721]: I0130 22:09:31.092918 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 
22:09:31 crc kubenswrapper[4721]: E0130 22:09:31.094197 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:09:44 crc kubenswrapper[4721]: I0130 22:09:44.093373 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:09:44 crc kubenswrapper[4721]: E0130 22:09:44.094759 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:09:57 crc kubenswrapper[4721]: I0130 22:09:57.092834 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:09:57 crc kubenswrapper[4721]: E0130 22:09:57.093965 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:10:12 crc kubenswrapper[4721]: I0130 22:10:12.098818 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:10:12 crc kubenswrapper[4721]: E0130 22:10:12.100919 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:10:23 crc kubenswrapper[4721]: I0130 22:10:23.092273 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:10:23 crc kubenswrapper[4721]: E0130 22:10:23.093277 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:10:37 crc kubenswrapper[4721]: I0130 22:10:37.092708 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:10:37 crc kubenswrapper[4721]: I0130 22:10:37.948655 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" 
event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"8cd25e378ee88e963690a4f456d348dfee9431c8a64722a2914aa440d94fdf76"} Jan 30 22:11:00 crc kubenswrapper[4721]: I0130 22:11:00.159753 4721 generic.go:334] "Generic (PLEG): container finished" podID="e148581e-1ed2-4532-a179-f1491d58dc0e" containerID="bb8942dd75c08deca5d67a49383766a7622b4fb04a2ce2d9bde08e4521ad8b84" exitCode=0 Jan 30 22:11:00 crc kubenswrapper[4721]: I0130 22:11:00.159859 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" event={"ID":"e148581e-1ed2-4532-a179-f1491d58dc0e","Type":"ContainerDied","Data":"bb8942dd75c08deca5d67a49383766a7622b4fb04a2ce2d9bde08e4521ad8b84"} Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.687903 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.810492 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-extra-config-0\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.810697 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-0\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.810735 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxlgj\" (UniqueName: \"kubernetes.io/projected/e148581e-1ed2-4532-a179-f1491d58dc0e-kube-api-access-xxlgj\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.810888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-1\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.810936 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-0\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.810991 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-combined-ca-bundle\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.811036 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-ssh-key-openstack-edpm-ipam\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 
30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.811106 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-1\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.811135 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-inventory\") pod \"e148581e-1ed2-4532-a179-f1491d58dc0e\" (UID: \"e148581e-1ed2-4532-a179-f1491d58dc0e\") " Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.823823 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.829013 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e148581e-1ed2-4532-a179-f1491d58dc0e-kube-api-access-xxlgj" (OuterVolumeSpecName: "kube-api-access-xxlgj") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "kube-api-access-xxlgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.845846 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.846060 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.846176 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.846685 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.848354 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.850648 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.860710 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-inventory" (OuterVolumeSpecName: "inventory") pod "e148581e-1ed2-4532-a179-f1491d58dc0e" (UID: "e148581e-1ed2-4532-a179-f1491d58dc0e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915676 4721 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915717 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915731 4721 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915747 4721 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915758 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxlgj\" (UniqueName: \"kubernetes.io/projected/e148581e-1ed2-4532-a179-f1491d58dc0e-kube-api-access-xxlgj\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915771 4721 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915782 4721 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915794 4721 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:01 crc kubenswrapper[4721]: I0130 22:11:01.915805 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e148581e-1ed2-4532-a179-f1491d58dc0e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.187728 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" event={"ID":"e148581e-1ed2-4532-a179-f1491d58dc0e","Type":"ContainerDied","Data":"16eae1a5ecf32284394eabe8bb9a619ea95bbfa38d5bda8b521289088e8fe108"} Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.188164 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16eae1a5ecf32284394eabe8bb9a619ea95bbfa38d5bda8b521289088e8fe108" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.188287 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-g4xdp" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.293517 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj"] Jan 30 22:11:02 crc kubenswrapper[4721]: E0130 22:11:02.294015 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e148581e-1ed2-4532-a179-f1491d58dc0e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.294036 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e148581e-1ed2-4532-a179-f1491d58dc0e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.294235 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e148581e-1ed2-4532-a179-f1491d58dc0e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.295068 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.297399 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.298778 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.298833 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b9l8d" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.301367 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.301576 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.303912 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj"] Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.425505 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.425601 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.425744 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.425788 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.426113 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 
22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.426188 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgq6c\" (UniqueName: \"kubernetes.io/projected/bb52513c-6253-41f2-aa93-808d6b9cbb62-kube-api-access-rgq6c\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.426275 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.528024 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.528126 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.528172 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.528190 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.529192 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.529662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgq6c\" (UniqueName: \"kubernetes.io/projected/bb52513c-6253-41f2-aa93-808d6b9cbb62-kube-api-access-rgq6c\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" 
(UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.529869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.533360 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.535227 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.535464 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.535938 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.536630 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.537027 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.553635 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgq6c\" (UniqueName: \"kubernetes.io/projected/bb52513c-6253-41f2-aa93-808d6b9cbb62-kube-api-access-rgq6c\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:02 crc kubenswrapper[4721]: I0130 22:11:02.639924 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:11:04 crc kubenswrapper[4721]: I0130 22:11:03.172596 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj"] Jan 30 22:11:04 crc kubenswrapper[4721]: I0130 22:11:03.200467 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" event={"ID":"bb52513c-6253-41f2-aa93-808d6b9cbb62","Type":"ContainerStarted","Data":"9f7a17cabe26b538e714e0f3976e00952853ef6ecc7117eb7375d9faac00cb35"} Jan 30 22:11:05 crc kubenswrapper[4721]: I0130 22:11:05.219789 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" event={"ID":"bb52513c-6253-41f2-aa93-808d6b9cbb62","Type":"ContainerStarted","Data":"9af9cdab53eba8c13e8b848141265cf75d86b7fc5f9e34568e92f4e51dd8d467"} Jan 30 22:11:05 crc kubenswrapper[4721]: I0130 22:11:05.247777 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" podStartSLOduration=2.184837179 podStartE2EDuration="3.247761568s" podCreationTimestamp="2026-01-30 22:11:02 +0000 UTC" firstStartedPulling="2026-01-30 22:11:03.179041423 +0000 UTC m=+3251.970942659" lastFinishedPulling="2026-01-30 22:11:04.241965802 +0000 UTC m=+3253.033867048" observedRunningTime="2026-01-30 22:11:05.242135752 +0000 UTC m=+3254.034036998" watchObservedRunningTime="2026-01-30 22:11:05.247761568 +0000 UTC m=+3254.039662814" Jan 30 22:12:59 crc kubenswrapper[4721]: I0130 22:12:59.448860 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:12:59 crc kubenswrapper[4721]: I0130 22:12:59.449354 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:13:11 crc kubenswrapper[4721]: I0130 22:13:11.457918 4721 generic.go:334] "Generic (PLEG): container finished" podID="bb52513c-6253-41f2-aa93-808d6b9cbb62" containerID="9af9cdab53eba8c13e8b848141265cf75d86b7fc5f9e34568e92f4e51dd8d467" exitCode=0 Jan 30 22:13:11 crc kubenswrapper[4721]: I0130 22:13:11.458008 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" event={"ID":"bb52513c-6253-41f2-aa93-808d6b9cbb62","Type":"ContainerDied","Data":"9af9cdab53eba8c13e8b848141265cf75d86b7fc5f9e34568e92f4e51dd8d467"} Jan 30 22:13:12 crc kubenswrapper[4721]: I0130 22:13:12.994755 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.103312 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-2\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.103607 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgq6c\" (UniqueName: \"kubernetes.io/projected/bb52513c-6253-41f2-aa93-808d6b9cbb62-kube-api-access-rgq6c\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.103738 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-0\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.103788 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-1\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.103888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ssh-key-openstack-edpm-ipam\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.104148 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-telemetry-combined-ca-bundle\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.104272 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-inventory\") pod \"bb52513c-6253-41f2-aa93-808d6b9cbb62\" (UID: \"bb52513c-6253-41f2-aa93-808d6b9cbb62\") " Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.112727 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.117620 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb52513c-6253-41f2-aa93-808d6b9cbb62-kube-api-access-rgq6c" (OuterVolumeSpecName: "kube-api-access-rgq6c") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "kube-api-access-rgq6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.141231 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.145485 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.156604 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.157136 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.170415 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-inventory" (OuterVolumeSpecName: "inventory") pod "bb52513c-6253-41f2-aa93-808d6b9cbb62" (UID: "bb52513c-6253-41f2-aa93-808d6b9cbb62"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207413 4721 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207470 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207491 4721 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207508 4721 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-inventory\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207526 4721 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207544 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgq6c\" (UniqueName: \"kubernetes.io/projected/bb52513c-6253-41f2-aa93-808d6b9cbb62-kube-api-access-rgq6c\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.207562 4721 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb52513c-6253-41f2-aa93-808d6b9cbb62-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.478932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" event={"ID":"bb52513c-6253-41f2-aa93-808d6b9cbb62","Type":"ContainerDied","Data":"9f7a17cabe26b538e714e0f3976e00952853ef6ecc7117eb7375d9faac00cb35"} Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.478988 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f7a17cabe26b538e714e0f3976e00952853ef6ecc7117eb7375d9faac00cb35" Jan 30 22:13:13 crc kubenswrapper[4721]: I0130 22:13:13.479227 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj" Jan 30 22:13:29 crc kubenswrapper[4721]: I0130 22:13:29.448422 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:13:29 crc kubenswrapper[4721]: I0130 22:13:29.449039 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.448105 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.448742 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.448794 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.449584 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8cd25e378ee88e963690a4f456d348dfee9431c8a64722a2914aa440d94fdf76"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.449642 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://8cd25e378ee88e963690a4f456d348dfee9431c8a64722a2914aa440d94fdf76" gracePeriod=600 Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.908926 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="8cd25e378ee88e963690a4f456d348dfee9431c8a64722a2914aa440d94fdf76" exitCode=0 Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.908988 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"8cd25e378ee88e963690a4f456d348dfee9431c8a64722a2914aa440d94fdf76"} Jan 30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.909312 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"} Jan 
30 22:13:59 crc kubenswrapper[4721]: I0130 22:13:59.909339 4721 scope.go:117] "RemoveContainer" containerID="a490aa4a5353da9a6774b9b3c25be561ab0a4c5b0447f56b0f66f98a0904698d" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.809805 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bk64r"] Jan 30 22:14:23 crc kubenswrapper[4721]: E0130 22:14:23.814205 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb52513c-6253-41f2-aa93-808d6b9cbb62" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.814229 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb52513c-6253-41f2-aa93-808d6b9cbb62" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.814495 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb52513c-6253-41f2-aa93-808d6b9cbb62" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.816144 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.822750 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bk64r"] Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.992230 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m9wq\" (UniqueName: \"kubernetes.io/projected/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-kube-api-access-8m9wq\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.992682 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-catalog-content\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:23 crc kubenswrapper[4721]: I0130 22:14:23.992850 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-utilities\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.094736 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-utilities\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.095186 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m9wq\" (UniqueName: \"kubernetes.io/projected/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-kube-api-access-8m9wq\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.095289 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-utilities\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.095640 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-catalog-content\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.096036 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-catalog-content\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.134270 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m9wq\" (UniqueName: \"kubernetes.io/projected/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-kube-api-access-8m9wq\") pod \"certified-operators-bk64r\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.146579 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:24 crc kubenswrapper[4721]: I0130 22:14:24.708547 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bk64r"] Jan 30 22:14:25 crc kubenswrapper[4721]: I0130 22:14:25.155709 4721 generic.go:334] "Generic (PLEG): container finished" podID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerID="f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab" exitCode=0 Jan 30 22:14:25 crc kubenswrapper[4721]: I0130 22:14:25.155952 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerDied","Data":"f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab"} Jan 30 22:14:25 crc kubenswrapper[4721]: I0130 22:14:25.156106 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerStarted","Data":"0e200e3de4c032cf63f2f377c8f743e69bb22e05cf7a568b1388a69e47809587"} Jan 30 22:14:25 crc kubenswrapper[4721]: I0130 22:14:25.159576 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 22:14:27 crc kubenswrapper[4721]: I0130 22:14:27.177699 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerStarted","Data":"a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09"} Jan 30 22:14:29 crc kubenswrapper[4721]: I0130 22:14:29.208923 4721 generic.go:334] "Generic (PLEG): container finished" podID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerID="a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09" exitCode=0 Jan 30 22:14:29 crc kubenswrapper[4721]: I0130 22:14:29.209077 4721 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerDied","Data":"a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09"} Jan 30 22:14:30 crc kubenswrapper[4721]: I0130 22:14:30.221530 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerStarted","Data":"732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07"} Jan 30 22:14:30 crc kubenswrapper[4721]: I0130 22:14:30.241613 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bk64r" podStartSLOduration=2.65453865 podStartE2EDuration="7.24158846s" podCreationTimestamp="2026-01-30 22:14:23 +0000 UTC" firstStartedPulling="2026-01-30 22:14:25.159227405 +0000 UTC m=+3453.951128661" lastFinishedPulling="2026-01-30 22:14:29.746277225 +0000 UTC m=+3458.538178471" observedRunningTime="2026-01-30 22:14:30.241247579 +0000 UTC m=+3459.033148825" watchObservedRunningTime="2026-01-30 22:14:30.24158846 +0000 UTC m=+3459.033489716" Jan 30 22:14:34 crc kubenswrapper[4721]: I0130 22:14:34.147495 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:34 crc kubenswrapper[4721]: I0130 22:14:34.148271 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:34 crc kubenswrapper[4721]: I0130 22:14:34.194831 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:34 crc kubenswrapper[4721]: I0130 22:14:34.301528 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:34 crc kubenswrapper[4721]: I0130 22:14:34.434086 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bk64r"] Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.274019 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bk64r" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="registry-server" containerID="cri-o://732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07" gracePeriod=2 Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.843259 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.984845 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m9wq\" (UniqueName: \"kubernetes.io/projected/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-kube-api-access-8m9wq\") pod \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.985112 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-utilities\") pod \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.985514 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-catalog-content\") pod \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\" (UID: \"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3\") " Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.986116 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-utilities" (OuterVolumeSpecName: "utilities") pod "2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" (UID: "2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.987082 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:14:36 crc kubenswrapper[4721]: I0130 22:14:36.991152 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-kube-api-access-8m9wq" (OuterVolumeSpecName: "kube-api-access-8m9wq") pod "2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" (UID: "2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3"). InnerVolumeSpecName "kube-api-access-8m9wq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.048606 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" (UID: "2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.089196 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.089241 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m9wq\" (UniqueName: \"kubernetes.io/projected/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3-kube-api-access-8m9wq\") on node \"crc\" DevicePath \"\"" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.285974 4721 generic.go:334] "Generic (PLEG): container finished" podID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerID="732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07" exitCode=0 Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.286020 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerDied","Data":"732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07"} Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.286053 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk64r" event={"ID":"2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3","Type":"ContainerDied","Data":"0e200e3de4c032cf63f2f377c8f743e69bb22e05cf7a568b1388a69e47809587"} Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.286080 4721 scope.go:117] "RemoveContainer" containerID="732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.286082 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bk64r" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.307821 4721 scope.go:117] "RemoveContainer" containerID="a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.324383 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bk64r"] Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.337152 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bk64r"] Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.341211 4721 scope.go:117] "RemoveContainer" containerID="f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.395791 4721 scope.go:117] "RemoveContainer" containerID="732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07" Jan 30 22:14:37 crc kubenswrapper[4721]: E0130 22:14:37.396509 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07\": container with ID starting with 732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07 not found: ID does not exist" containerID="732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.396653 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07"} err="failed to get container status \"732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07\": rpc error: code = NotFound desc = could not find container \"732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07\": container with ID starting with 732e2f36fb847013616968e47115662438c4bbd9d9e3872d222dddef9d543d07 not found: ID does not exist" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.396776 4721 scope.go:117] "RemoveContainer" containerID="a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09" Jan 30 22:14:37 crc kubenswrapper[4721]: E0130 22:14:37.397671 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09\": container with ID starting with a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09 not found: ID does not exist" containerID="a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.397779 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09"} err="failed to get container status \"a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09\": rpc error: code = NotFound desc = could not find container \"a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09\": container with ID starting with a7365a58f3d8d47bf499fb58d9fb3ac124c853074c43ad78edbdfe3ab40ccb09 not found: ID does not exist" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.397876 4721 scope.go:117] "RemoveContainer" containerID="f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab" Jan 30 22:14:37 crc kubenswrapper[4721]: E0130 22:14:37.399562 4721 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab\": container with ID starting with f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab not found: ID does not exist" containerID="f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab" Jan 30 22:14:37 crc kubenswrapper[4721]: I0130 22:14:37.399698 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab"} err="failed to get container status \"f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab\": rpc error: code = NotFound desc = could not find container \"f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab\": container with ID starting with f89fd843dc69b03f6092aa26704cb5ff8c7eaff26b4d723027815a355c645fab not found: ID does not exist" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.116025 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" path="/var/lib/kubelet/pods/2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3/volumes" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.603960 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Jan 30 22:14:38 crc kubenswrapper[4721]: E0130 22:14:38.604696 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="extract-content" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.604716 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="extract-content" Jan 30 22:14:38 crc kubenswrapper[4721]: E0130 22:14:38.604730 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="registry-server" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.604736 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="registry-server" Jan 30 22:14:38 crc kubenswrapper[4721]: E0130 22:14:38.604765 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="extract-utilities" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.604772 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="extract-utilities" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.604958 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c7b3eaa-d962-4c5d-84f7-6ed9b27c15d3" containerName="registry-server" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.605847 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.609033 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.609248 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.609256 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.609642 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-psmff" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.617038 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.619448 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.619595 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-config-data\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.619716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.721828 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.721982 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722370 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722523 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722584 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-config-data\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722674 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722755 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722800 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.722939 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9547f\" (UniqueName: \"kubernetes.io/projected/519f6e4a-b90c-4146-8ac9-d03854442bdd-kube-api-access-9547f\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.724603 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.725383 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-config-data\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.732777 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825126 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9547f\" (UniqueName: \"kubernetes.io/projected/519f6e4a-b90c-4146-8ac9-d03854442bdd-kube-api-access-9547f\") pod 
\"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825253 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825282 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825399 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825531 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.825884 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.826568 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.830627 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" 
Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.833131 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.850787 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9547f\" (UniqueName: \"kubernetes.io/projected/519f6e4a-b90c-4146-8ac9-d03854442bdd-kube-api-access-9547f\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.855377 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " pod="openstack/tempest-tests-tempest" Jan 30 22:14:38 crc kubenswrapper[4721]: I0130 22:14:38.924349 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 30 22:14:39 crc kubenswrapper[4721]: I0130 22:14:39.422636 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Jan 30 22:14:39 crc kubenswrapper[4721]: W0130 22:14:39.431441 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod519f6e4a_b90c_4146_8ac9_d03854442bdd.slice/crio-24b6bcb2ae27c54c49c65d58c3951803135e1b9f5228b675862c5d67c368663a WatchSource:0}: Error finding container 24b6bcb2ae27c54c49c65d58c3951803135e1b9f5228b675862c5d67c368663a: Status 404 returned error can't find the container with id 24b6bcb2ae27c54c49c65d58c3951803135e1b9f5228b675862c5d67c368663a Jan 30 22:14:40 crc kubenswrapper[4721]: I0130 22:14:40.328012 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"519f6e4a-b90c-4146-8ac9-d03854442bdd","Type":"ContainerStarted","Data":"24b6bcb2ae27c54c49c65d58c3951803135e1b9f5228b675862c5d67c368663a"} Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.144486 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s"] Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.146634 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.149238 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.155110 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.157455 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s"] Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.253927 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74pzp\" (UniqueName: \"kubernetes.io/projected/c0bed957-c350-4ed9-a223-c7bd60953985-kube-api-access-74pzp\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.254067 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0bed957-c350-4ed9-a223-c7bd60953985-config-volume\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.254485 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0bed957-c350-4ed9-a223-c7bd60953985-secret-volume\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.356618 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74pzp\" (UniqueName: \"kubernetes.io/projected/c0bed957-c350-4ed9-a223-c7bd60953985-kube-api-access-74pzp\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.356709 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0bed957-c350-4ed9-a223-c7bd60953985-config-volume\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.357493 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0bed957-c350-4ed9-a223-c7bd60953985-config-volume\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.357701 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0bed957-c350-4ed9-a223-c7bd60953985-secret-volume\") pod 
\"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.367072 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0bed957-c350-4ed9-a223-c7bd60953985-secret-volume\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.387476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74pzp\" (UniqueName: \"kubernetes.io/projected/c0bed957-c350-4ed9-a223-c7bd60953985-kube-api-access-74pzp\") pod \"collect-profiles-29496855-kr64s\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:00 crc kubenswrapper[4721]: I0130 22:15:00.477872 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.318345 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fthbb"] Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.324087 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.349852 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fthbb"] Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.499275 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-catalog-content\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.499349 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-utilities\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.499576 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nst54\" (UniqueName: \"kubernetes.io/projected/38e6a7f0-8832-40f5-9402-d32b65612b9a-kube-api-access-nst54\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.601127 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-catalog-content\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.601183 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-utilities\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.601315 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nst54\" (UniqueName: \"kubernetes.io/projected/38e6a7f0-8832-40f5-9402-d32b65612b9a-kube-api-access-nst54\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.601725 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-catalog-content\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.601791 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-utilities\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.622308 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nst54\" (UniqueName: \"kubernetes.io/projected/38e6a7f0-8832-40f5-9402-d32b65612b9a-kube-api-access-nst54\") pod \"redhat-operators-fthbb\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") " pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:14 crc kubenswrapper[4721]: I0130 22:15:14.657125 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fthbb" Jan 30 22:15:16 crc kubenswrapper[4721]: E0130 22:15:16.876596 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Jan 30 22:15:16 crc kubenswrapper[4721]: E0130 22:15:16.878250 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9547f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
tempest-tests-tempest_openstack(519f6e4a-b90c-4146-8ac9-d03854442bdd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 30 22:15:16 crc kubenswrapper[4721]: E0130 22:15:16.879564 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="519f6e4a-b90c-4146-8ac9-d03854442bdd" Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.299016 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s"] Jan 30 22:15:17 crc kubenswrapper[4721]: W0130 22:15:17.306046 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0bed957_c350_4ed9_a223_c7bd60953985.slice/crio-c250a95b6c0a9ee5d7a03c2abffbef2e78e8ac46a91de1e99ac879d96dafff7b WatchSource:0}: Error finding container c250a95b6c0a9ee5d7a03c2abffbef2e78e8ac46a91de1e99ac879d96dafff7b: Status 404 returned error can't find the container with id c250a95b6c0a9ee5d7a03c2abffbef2e78e8ac46a91de1e99ac879d96dafff7b Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.311936 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fthbb"] Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.763342 4721 generic.go:334] "Generic (PLEG): container finished" podID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerID="75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440" exitCode=0 Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.764892 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerDied","Data":"75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440"} Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.764937 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerStarted","Data":"1bb1a778a0050c7c48ac0b05c7789c7fd7d582f6cc5fa2b7779a8aac80b4aa46"} Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.773722 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" event={"ID":"c0bed957-c350-4ed9-a223-c7bd60953985","Type":"ContainerStarted","Data":"6f2585757abea728f0252be73db09277b0e29e33c7e26bd28aa3a185137ccdac"} Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.773788 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" event={"ID":"c0bed957-c350-4ed9-a223-c7bd60953985","Type":"ContainerStarted","Data":"c250a95b6c0a9ee5d7a03c2abffbef2e78e8ac46a91de1e99ac879d96dafff7b"} Jan 30 22:15:17 crc kubenswrapper[4721]: E0130 22:15:17.774469 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="519f6e4a-b90c-4146-8ac9-d03854442bdd" Jan 30 22:15:17 crc kubenswrapper[4721]: I0130 22:15:17.813460 4721 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" podStartSLOduration=17.813435134 podStartE2EDuration="17.813435134s" podCreationTimestamp="2026-01-30 22:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 22:15:17.807656714 +0000 UTC m=+3506.599557980" watchObservedRunningTime="2026-01-30 22:15:17.813435134 +0000 UTC m=+3506.605336380" Jan 30 22:15:18 crc kubenswrapper[4721]: I0130 22:15:18.784589 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerStarted","Data":"bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a"} Jan 30 22:15:18 crc kubenswrapper[4721]: I0130 22:15:18.788727 4721 generic.go:334] "Generic (PLEG): container finished" podID="c0bed957-c350-4ed9-a223-c7bd60953985" containerID="6f2585757abea728f0252be73db09277b0e29e33c7e26bd28aa3a185137ccdac" exitCode=0 Jan 30 22:15:18 crc kubenswrapper[4721]: I0130 22:15:18.788768 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" event={"ID":"c0bed957-c350-4ed9-a223-c7bd60953985","Type":"ContainerDied","Data":"6f2585757abea728f0252be73db09277b0e29e33c7e26bd28aa3a185137ccdac"} Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.209523 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.328492 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0bed957-c350-4ed9-a223-c7bd60953985-secret-volume\") pod \"c0bed957-c350-4ed9-a223-c7bd60953985\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.328583 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74pzp\" (UniqueName: \"kubernetes.io/projected/c0bed957-c350-4ed9-a223-c7bd60953985-kube-api-access-74pzp\") pod \"c0bed957-c350-4ed9-a223-c7bd60953985\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.328664 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0bed957-c350-4ed9-a223-c7bd60953985-config-volume\") pod \"c0bed957-c350-4ed9-a223-c7bd60953985\" (UID: \"c0bed957-c350-4ed9-a223-c7bd60953985\") " Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.329632 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0bed957-c350-4ed9-a223-c7bd60953985-config-volume" (OuterVolumeSpecName: "config-volume") pod "c0bed957-c350-4ed9-a223-c7bd60953985" (UID: "c0bed957-c350-4ed9-a223-c7bd60953985"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.334948 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0bed957-c350-4ed9-a223-c7bd60953985-kube-api-access-74pzp" (OuterVolumeSpecName: "kube-api-access-74pzp") pod "c0bed957-c350-4ed9-a223-c7bd60953985" (UID: "c0bed957-c350-4ed9-a223-c7bd60953985"). InnerVolumeSpecName "kube-api-access-74pzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.335186 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0bed957-c350-4ed9-a223-c7bd60953985-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c0bed957-c350-4ed9-a223-c7bd60953985" (UID: "c0bed957-c350-4ed9-a223-c7bd60953985"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.355172 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"] Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.364095 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496810-vqlxb"] Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.431327 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0bed957-c350-4ed9-a223-c7bd60953985-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.431366 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0bed957-c350-4ed9-a223-c7bd60953985-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.431401 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74pzp\" (UniqueName: \"kubernetes.io/projected/c0bed957-c350-4ed9-a223-c7bd60953985-kube-api-access-74pzp\") on node \"crc\" DevicePath \"\"" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.805579 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s" event={"ID":"c0bed957-c350-4ed9-a223-c7bd60953985","Type":"ContainerDied","Data":"c250a95b6c0a9ee5d7a03c2abffbef2e78e8ac46a91de1e99ac879d96dafff7b"} Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.805636 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c250a95b6c0a9ee5d7a03c2abffbef2e78e8ac46a91de1e99ac879d96dafff7b" Jan 30 22:15:20 crc kubenswrapper[4721]: I0130 22:15:20.805636 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496855-kr64s"
Jan 30 22:15:22 crc kubenswrapper[4721]: I0130 22:15:22.122984 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f52e1d0c-42d1-4ae6-824a-46ebbfff9fca" path="/var/lib/kubelet/pods/f52e1d0c-42d1-4ae6-824a-46ebbfff9fca/volumes"
Jan 30 22:15:26 crc kubenswrapper[4721]: I0130 22:15:26.865460 4721 generic.go:334] "Generic (PLEG): container finished" podID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerID="bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a" exitCode=0
Jan 30 22:15:26 crc kubenswrapper[4721]: I0130 22:15:26.865573 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerDied","Data":"bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a"}
Jan 30 22:15:27 crc kubenswrapper[4721]: I0130 22:15:27.876692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerStarted","Data":"e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115"}
Jan 30 22:15:27 crc kubenswrapper[4721]: I0130 22:15:27.897990 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fthbb" podStartSLOduration=4.423575803 podStartE2EDuration="13.897962879s" podCreationTimestamp="2026-01-30 22:15:14 +0000 UTC" firstStartedPulling="2026-01-30 22:15:17.772106115 +0000 UTC m=+3506.564007361" lastFinishedPulling="2026-01-30 22:15:27.246493191 +0000 UTC m=+3516.038394437" observedRunningTime="2026-01-30 22:15:27.891634421 +0000 UTC m=+3516.683535677" watchObservedRunningTime="2026-01-30 22:15:27.897962879 +0000 UTC m=+3516.689864135"
Jan 30 22:15:29 crc kubenswrapper[4721]: I0130 22:15:29.522390 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Jan 30 22:15:30 crc kubenswrapper[4721]: I0130 22:15:30.909179 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"519f6e4a-b90c-4146-8ac9-d03854442bdd","Type":"ContainerStarted","Data":"7a0cb033d7c1f91f7d5f1e56708cf1cea900cc1dcaf4d556dfb7344e0e4cd328"}
Jan 30 22:15:30 crc kubenswrapper[4721]: I0130 22:15:30.939342 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.853988687 podStartE2EDuration="53.939316868s" podCreationTimestamp="2026-01-30 22:14:37 +0000 UTC" firstStartedPulling="2026-01-30 22:14:39.433660848 +0000 UTC m=+3468.225562094" lastFinishedPulling="2026-01-30 22:15:29.518989029 +0000 UTC m=+3518.310890275" observedRunningTime="2026-01-30 22:15:30.924883137 +0000 UTC m=+3519.716784393" watchObservedRunningTime="2026-01-30 22:15:30.939316868 +0000 UTC m=+3519.731218114"
Jan 30 22:15:34 crc kubenswrapper[4721]: I0130 22:15:34.657639 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fthbb"
Jan 30 22:15:34 crc kubenswrapper[4721]: I0130 22:15:34.658105 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fthbb"
Jan 30 22:15:34 crc kubenswrapper[4721]: I0130 22:15:34.704785 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fthbb"
Jan 30 22:15:35 crc kubenswrapper[4721]: I0130 22:15:35.005951 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fthbb"
Jan 30 22:15:35 crc kubenswrapper[4721]: I0130 22:15:35.551672 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fthbb"]
Jan 30 22:15:36 crc kubenswrapper[4721]: I0130 22:15:36.979018 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fthbb" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="registry-server" containerID="cri-o://e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115" gracePeriod=2
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.507664 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fthbb"
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.623442 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-utilities\") pod \"38e6a7f0-8832-40f5-9402-d32b65612b9a\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") "
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.623513 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-catalog-content\") pod \"38e6a7f0-8832-40f5-9402-d32b65612b9a\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") "
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.623562 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nst54\" (UniqueName: \"kubernetes.io/projected/38e6a7f0-8832-40f5-9402-d32b65612b9a-kube-api-access-nst54\") pod \"38e6a7f0-8832-40f5-9402-d32b65612b9a\" (UID: \"38e6a7f0-8832-40f5-9402-d32b65612b9a\") "
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.624333 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-utilities" (OuterVolumeSpecName: "utilities") pod "38e6a7f0-8832-40f5-9402-d32b65612b9a" (UID: "38e6a7f0-8832-40f5-9402-d32b65612b9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.628783 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38e6a7f0-8832-40f5-9402-d32b65612b9a-kube-api-access-nst54" (OuterVolumeSpecName: "kube-api-access-nst54") pod "38e6a7f0-8832-40f5-9402-d32b65612b9a" (UID: "38e6a7f0-8832-40f5-9402-d32b65612b9a"). InnerVolumeSpecName "kube-api-access-nst54". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.725827 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.726159 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nst54\" (UniqueName: \"kubernetes.io/projected/38e6a7f0-8832-40f5-9402-d32b65612b9a-kube-api-access-nst54\") on node \"crc\" DevicePath \"\""
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.750127 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38e6a7f0-8832-40f5-9402-d32b65612b9a" (UID: "38e6a7f0-8832-40f5-9402-d32b65612b9a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.827778 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38e6a7f0-8832-40f5-9402-d32b65612b9a-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.990123 4721 generic.go:334] "Generic (PLEG): container finished" podID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerID="e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115" exitCode=0
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.990173 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerDied","Data":"e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115"}
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.990207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fthbb" event={"ID":"38e6a7f0-8832-40f5-9402-d32b65612b9a","Type":"ContainerDied","Data":"1bb1a778a0050c7c48ac0b05c7789c7fd7d582f6cc5fa2b7779a8aac80b4aa46"}
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.990226 4721 scope.go:117] "RemoveContainer" containerID="e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115"
Jan 30 22:15:37 crc kubenswrapper[4721]: I0130 22:15:37.990404 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fthbb"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.025378 4721 scope.go:117] "RemoveContainer" containerID="bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.029172 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fthbb"]
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.041191 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fthbb"]
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.055451 4721 scope.go:117] "RemoveContainer" containerID="75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.117262 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" path="/var/lib/kubelet/pods/38e6a7f0-8832-40f5-9402-d32b65612b9a/volumes"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.127953 4721 scope.go:117] "RemoveContainer" containerID="e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115"
Jan 30 22:15:38 crc kubenswrapper[4721]: E0130 22:15:38.128391 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115\": container with ID starting with e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115 not found: ID does not exist" containerID="e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.128423 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115"} err="failed to get container status \"e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115\": rpc error: code = NotFound desc = could not find container \"e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115\": container with ID starting with e587086ae75160dfab5e92f94b4cc47c5d3e2006f29479a575896c9fef344115 not found: ID does not exist"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.128455 4721 scope.go:117] "RemoveContainer" containerID="bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a"
Jan 30 22:15:38 crc kubenswrapper[4721]: E0130 22:15:38.128922 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a\": container with ID starting with bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a not found: ID does not exist" containerID="bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.128945 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a"} err="failed to get container status \"bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a\": rpc error: code = NotFound desc = could not find container \"bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a\": container with ID starting with bbde7ca9997767e575a29ebf8e3d42df43a1288cce3868432e5f5bd2413a902a not found: ID does not exist"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.128962 4721 scope.go:117] "RemoveContainer" containerID="75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440"
Jan 30 22:15:38 crc kubenswrapper[4721]: E0130 22:15:38.130049 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440\": container with ID starting with 75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440 not found: ID does not exist" containerID="75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440"
Jan 30 22:15:38 crc kubenswrapper[4721]: I0130 22:15:38.130077 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440"} err="failed to get container status \"75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440\": rpc error: code = NotFound desc = could not find container \"75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440\": container with ID starting with 75047669f633b834681473901a20d4467f5c56b86c95893d5c779f30088d0440 not found: ID does not exist"
Jan 30 22:15:59 crc kubenswrapper[4721]: I0130 22:15:59.449066 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 22:15:59 crc kubenswrapper[4721]: I0130 22:15:59.449775 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 22:16:16 crc kubenswrapper[4721]: I0130 22:16:16.816732 4721 scope.go:117] "RemoveContainer" containerID="d0e9ca4a2e13396d3ab7df216f4a804f733562f20fa8af8e000bcaccaacf4f9a"
Jan 30 22:16:29 crc kubenswrapper[4721]: I0130 22:16:29.448402 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 22:16:29 crc kubenswrapper[4721]: I0130 22:16:29.450120 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.448964 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.449442 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.449518 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.450230 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.450271 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" gracePeriod=600
Jan 30 22:16:59 crc kubenswrapper[4721]: E0130 22:16:59.580010 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.789356 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" exitCode=0
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.789430 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"}
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.789516 4721 scope.go:117] "RemoveContainer" containerID="8cd25e378ee88e963690a4f456d348dfee9431c8a64722a2914aa440d94fdf76"
Jan 30 22:16:59 crc kubenswrapper[4721]: I0130 22:16:59.790372 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:16:59 crc kubenswrapper[4721]: E0130 22:16:59.790710 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:17:11 crc kubenswrapper[4721]: I0130 22:17:11.092778 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:17:11 crc kubenswrapper[4721]: E0130 22:17:11.093543 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:17:24 crc kubenswrapper[4721]: I0130 22:17:24.092403 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:17:24 crc kubenswrapper[4721]: E0130 22:17:24.093223 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:17:39 crc kubenswrapper[4721]: I0130 22:17:39.092599 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:17:39 crc kubenswrapper[4721]: E0130 22:17:39.093459 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:17:52 crc kubenswrapper[4721]: I0130 22:17:52.103962 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:17:52 crc kubenswrapper[4721]: E0130 22:17:52.104663 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:18:07 crc kubenswrapper[4721]: I0130 22:18:07.092532 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:18:07 crc kubenswrapper[4721]: E0130 22:18:07.093595 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:18:21 crc kubenswrapper[4721]: I0130 22:18:21.092020 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:18:21 crc kubenswrapper[4721]: E0130 22:18:21.092822 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:18:34 crc kubenswrapper[4721]: I0130 22:18:34.092449 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:18:34 crc kubenswrapper[4721]: E0130 22:18:34.093521 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:18:49 crc kubenswrapper[4721]: I0130 22:18:49.092574 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:18:49 crc kubenswrapper[4721]: E0130 22:18:49.093505 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.360828 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pzs6t"]
Jan 30 22:18:56 crc kubenswrapper[4721]: E0130 22:18:56.361892 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0bed957-c350-4ed9-a223-c7bd60953985" containerName="collect-profiles"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.361906 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0bed957-c350-4ed9-a223-c7bd60953985" containerName="collect-profiles"
Jan 30 22:18:56 crc kubenswrapper[4721]: E0130 22:18:56.361938 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="registry-server"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.361946 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="registry-server"
Jan 30 22:18:56 crc kubenswrapper[4721]: E0130 22:18:56.361962 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="extract-utilities"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.361969 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="extract-utilities"
Jan 30 22:18:56 crc kubenswrapper[4721]: E0130 22:18:56.361980 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="extract-content"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.361986 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="extract-content"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.362184 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="38e6a7f0-8832-40f5-9402-d32b65612b9a" containerName="registry-server"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.362213 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0bed957-c350-4ed9-a223-c7bd60953985" containerName="collect-profiles"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.365067 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.379491 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pzs6t"]
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.459180 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4l2w\" (UniqueName: \"kubernetes.io/projected/fab8a756-4964-47cf-ad9c-4b07094fbd32-kube-api-access-w4l2w\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.459266 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-catalog-content\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.459771 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-utilities\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.561758 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-utilities\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.562238 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4l2w\" (UniqueName: \"kubernetes.io/projected/fab8a756-4964-47cf-ad9c-4b07094fbd32-kube-api-access-w4l2w\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.562352 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-utilities\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.562406 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-catalog-content\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.562900 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-catalog-content\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.607203 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4l2w\" (UniqueName: \"kubernetes.io/projected/fab8a756-4964-47cf-ad9c-4b07094fbd32-kube-api-access-w4l2w\") pod \"community-operators-pzs6t\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") " pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:56 crc kubenswrapper[4721]: I0130 22:18:56.692247 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:18:57 crc kubenswrapper[4721]: I0130 22:18:57.300583 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pzs6t"]
Jan 30 22:18:58 crc kubenswrapper[4721]: I0130 22:18:58.040188 4721 generic.go:334] "Generic (PLEG): container finished" podID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerID="5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091" exitCode=0
Jan 30 22:18:58 crc kubenswrapper[4721]: I0130 22:18:58.040291 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerDied","Data":"5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091"}
Jan 30 22:18:58 crc kubenswrapper[4721]: I0130 22:18:58.040488 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerStarted","Data":"1312c628d360fe4963d62bee42e22601cf73fe02a3ee7c6315d645485cdcc6c2"}
Jan 30 22:18:59 crc kubenswrapper[4721]: I0130 22:18:59.055566 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerStarted","Data":"5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add"}
Jan 30 22:19:01 crc kubenswrapper[4721]: I0130 22:19:01.073741 4721 generic.go:334] "Generic (PLEG): container finished" podID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerID="5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add" exitCode=0
Jan 30 22:19:01 crc kubenswrapper[4721]: I0130 22:19:01.073815 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerDied","Data":"5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add"}
Jan 30 22:19:01 crc kubenswrapper[4721]: I0130 22:19:01.091949 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:19:01 crc kubenswrapper[4721]: E0130 22:19:01.092320 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:19:02 crc kubenswrapper[4721]: I0130 22:19:02.084150 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerStarted","Data":"5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6"}
Jan 30 22:19:06 crc kubenswrapper[4721]: I0130 22:19:06.692610 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:19:06 crc kubenswrapper[4721]: I0130 22:19:06.693227 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:19:07 crc kubenswrapper[4721]: I0130 22:19:07.743407 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-pzs6t" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="registry-server" probeResult="failure" output=<
Jan 30 22:19:07 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Jan 30 22:19:07 crc kubenswrapper[4721]: >
Jan 30 22:19:13 crc kubenswrapper[4721]: I0130 22:19:13.093467 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:19:13 crc kubenswrapper[4721]: E0130 22:19:13.094433 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:19:16 crc kubenswrapper[4721]: I0130 22:19:16.752448 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:19:16 crc kubenswrapper[4721]: I0130 22:19:16.779321 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pzs6t" podStartSLOduration=17.363627362 podStartE2EDuration="20.77928476s" podCreationTimestamp="2026-01-30 22:18:56 +0000 UTC" firstStartedPulling="2026-01-30 22:18:58.043406408 +0000 UTC m=+3726.835307664" lastFinishedPulling="2026-01-30 22:19:01.459063816 +0000 UTC m=+3730.250965062" observedRunningTime="2026-01-30 22:19:02.104351921 +0000 UTC m=+3730.896253187" watchObservedRunningTime="2026-01-30 22:19:16.77928476 +0000 UTC m=+3745.571186016"
Jan 30 22:19:16 crc kubenswrapper[4721]: I0130 22:19:16.820863 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:19:16 crc kubenswrapper[4721]: I0130 22:19:16.993187 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pzs6t"]
Jan 30 22:19:18 crc kubenswrapper[4721]: I0130 22:19:18.239799 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pzs6t" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="registry-server" containerID="cri-o://5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6" gracePeriod=2
Jan 30 22:19:18 crc kubenswrapper[4721]: I0130 22:19:18.989787 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pzs6t"
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.143478 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-utilities\") pod \"fab8a756-4964-47cf-ad9c-4b07094fbd32\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") "
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.143541 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4l2w\" (UniqueName: \"kubernetes.io/projected/fab8a756-4964-47cf-ad9c-4b07094fbd32-kube-api-access-w4l2w\") pod \"fab8a756-4964-47cf-ad9c-4b07094fbd32\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") "
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.144519 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-utilities" (OuterVolumeSpecName: "utilities") pod "fab8a756-4964-47cf-ad9c-4b07094fbd32" (UID: "fab8a756-4964-47cf-ad9c-4b07094fbd32"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.144727 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-catalog-content\") pod \"fab8a756-4964-47cf-ad9c-4b07094fbd32\" (UID: \"fab8a756-4964-47cf-ad9c-4b07094fbd32\") "
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.145622 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.153903 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fab8a756-4964-47cf-ad9c-4b07094fbd32-kube-api-access-w4l2w" (OuterVolumeSpecName: "kube-api-access-w4l2w") pod "fab8a756-4964-47cf-ad9c-4b07094fbd32" (UID: "fab8a756-4964-47cf-ad9c-4b07094fbd32"). InnerVolumeSpecName "kube-api-access-w4l2w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.208217 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fab8a756-4964-47cf-ad9c-4b07094fbd32" (UID: "fab8a756-4964-47cf-ad9c-4b07094fbd32"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.247095 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4l2w\" (UniqueName: \"kubernetes.io/projected/fab8a756-4964-47cf-ad9c-4b07094fbd32-kube-api-access-w4l2w\") on node \"crc\" DevicePath \"\"" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.247128 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fab8a756-4964-47cf-ad9c-4b07094fbd32-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.251822 4721 generic.go:334] "Generic (PLEG): container finished" podID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerID="5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6" exitCode=0 Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.251877 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerDied","Data":"5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6"} Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.251883 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pzs6t" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.251904 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzs6t" event={"ID":"fab8a756-4964-47cf-ad9c-4b07094fbd32","Type":"ContainerDied","Data":"1312c628d360fe4963d62bee42e22601cf73fe02a3ee7c6315d645485cdcc6c2"} Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.251922 4721 scope.go:117] "RemoveContainer" containerID="5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.275477 4721 scope.go:117] "RemoveContainer" containerID="5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.292958 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pzs6t"] Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.302613 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pzs6t"] Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.313470 4721 scope.go:117] "RemoveContainer" containerID="5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.360962 4721 scope.go:117] "RemoveContainer" containerID="5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6" Jan 30 22:19:19 crc kubenswrapper[4721]: E0130 22:19:19.365866 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6\": container with ID starting with 5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6 not found: ID does not exist" containerID="5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.365941 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6"} err="failed to get container status 
\"5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6\": rpc error: code = NotFound desc = could not find container \"5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6\": container with ID starting with 5947d305e697e7dbd9506f37585b01fa38d186a78f2e362b886954d1382307a6 not found: ID does not exist" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.365990 4721 scope.go:117] "RemoveContainer" containerID="5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add" Jan 30 22:19:19 crc kubenswrapper[4721]: E0130 22:19:19.366443 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add\": container with ID starting with 5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add not found: ID does not exist" containerID="5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.366491 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add"} err="failed to get container status \"5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add\": rpc error: code = NotFound desc = could not find container \"5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add\": container with ID starting with 5920263e4627bceb6e43fadda579077675a8191d73d29f1e769ef4f961af9add not found: ID does not exist" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.366522 4721 scope.go:117] "RemoveContainer" containerID="5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091" Jan 30 22:19:19 crc kubenswrapper[4721]: E0130 22:19:19.366874 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091\": container with ID starting with 5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091 not found: ID does not exist" containerID="5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091" Jan 30 22:19:19 crc kubenswrapper[4721]: I0130 22:19:19.366911 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091"} err="failed to get container status \"5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091\": rpc error: code = NotFound desc = could not find container \"5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091\": container with ID starting with 5e6812a57dc8d0b32048976353cd7a7a1fdd90566fe86da646b4c995ad127091 not found: ID does not exist" Jan 30 22:19:20 crc kubenswrapper[4721]: I0130 22:19:20.105154 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" path="/var/lib/kubelet/pods/fab8a756-4964-47cf-ad9c-4b07094fbd32/volumes" Jan 30 22:19:25 crc kubenswrapper[4721]: I0130 22:19:25.093251 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:19:25 crc kubenswrapper[4721]: E0130 22:19:25.094290 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:19:37 crc kubenswrapper[4721]: I0130 22:19:37.092598 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:19:37 crc kubenswrapper[4721]: E0130 22:19:37.093379 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:19:49 crc kubenswrapper[4721]: I0130 22:19:49.091873 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:19:49 crc kubenswrapper[4721]: E0130 22:19:49.092902 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:20:03 crc kubenswrapper[4721]: I0130 22:20:03.093147 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:20:03 crc kubenswrapper[4721]: E0130 22:20:03.094049 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:20:12 crc kubenswrapper[4721]: I0130 22:20:12.727191 4721 generic.go:334] "Generic (PLEG): container finished" podID="519f6e4a-b90c-4146-8ac9-d03854442bdd" containerID="7a0cb033d7c1f91f7d5f1e56708cf1cea900cc1dcaf4d556dfb7344e0e4cd328" exitCode=0 Jan 30 22:20:12 crc kubenswrapper[4721]: I0130 22:20:12.727269 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"519f6e4a-b90c-4146-8ac9-d03854442bdd","Type":"ContainerDied","Data":"7a0cb033d7c1f91f7d5f1e56708cf1cea900cc1dcaf4d556dfb7344e0e4cd328"} Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.278771 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.429772 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-config-data\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430138 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ca-certs\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430183 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430217 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ssh-key\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430316 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config-secret\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430364 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-workdir\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430397 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9547f\" (UniqueName: \"kubernetes.io/projected/519f6e4a-b90c-4146-8ac9-d03854442bdd-kube-api-access-9547f\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430474 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-temporary\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.430577 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"519f6e4a-b90c-4146-8ac9-d03854442bdd\" (UID: \"519f6e4a-b90c-4146-8ac9-d03854442bdd\") " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.431288 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.431794 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-config-data" (OuterVolumeSpecName: "config-data") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.439400 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519f6e4a-b90c-4146-8ac9-d03854442bdd-kube-api-access-9547f" (OuterVolumeSpecName: "kube-api-access-9547f") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "kube-api-access-9547f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.441654 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "test-operator-logs") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.470151 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.477757 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.507056 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.527852 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532314 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532335 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9547f\" (UniqueName: \"kubernetes.io/projected/519f6e4a-b90c-4146-8ac9-d03854442bdd-kube-api-access-9547f\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532346 4721 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532366 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532376 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-config-data\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532385 4721 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ca-certs\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532394 4721 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/519f6e4a-b90c-4146-8ac9-d03854442bdd-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.532403 4721 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/519f6e4a-b90c-4146-8ac9-d03854442bdd-ssh-key\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.565975 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.634902 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.750796 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"519f6e4a-b90c-4146-8ac9-d03854442bdd","Type":"ContainerDied","Data":"24b6bcb2ae27c54c49c65d58c3951803135e1b9f5228b675862c5d67c368663a"} Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.750848 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24b6bcb2ae27c54c49c65d58c3951803135e1b9f5228b675862c5d67c368663a" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.750912 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.830250 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "519f6e4a-b90c-4146-8ac9-d03854442bdd" (UID: "519f6e4a-b90c-4146-8ac9-d03854442bdd"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:20:14 crc kubenswrapper[4721]: I0130 22:20:14.839573 4721 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/519f6e4a-b90c-4146-8ac9-d03854442bdd-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Jan 30 22:20:18 crc kubenswrapper[4721]: I0130 22:20:18.092964 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:20:18 crc kubenswrapper[4721]: E0130 22:20:18.094134 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.399734 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 30 22:20:25 crc kubenswrapper[4721]: E0130 22:20:25.401839 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="extract-content" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.401939 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="extract-content" Jan 30 22:20:25 crc kubenswrapper[4721]: E0130 22:20:25.402028 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="extract-utilities" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.402104 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="extract-utilities" Jan 30 22:20:25 crc kubenswrapper[4721]: E0130 22:20:25.402193 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519f6e4a-b90c-4146-8ac9-d03854442bdd" containerName="tempest-tests-tempest-tests-runner" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.402316 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="519f6e4a-b90c-4146-8ac9-d03854442bdd" containerName="tempest-tests-tempest-tests-runner" Jan 30 22:20:25 crc kubenswrapper[4721]: E0130 22:20:25.402399 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="registry-server" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.402477 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="registry-server" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.402793 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fab8a756-4964-47cf-ad9c-4b07094fbd32" containerName="registry-server" Jan 30 
22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.402902 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="519f6e4a-b90c-4146-8ac9-d03854442bdd" containerName="tempest-tests-tempest-tests-runner" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.403882 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.406625 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-psmff" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.423499 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.561084 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.561144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp8jc\" (UniqueName: \"kubernetes.io/projected/177f7165-d126-4742-a778-e5e845c54fab-kube-api-access-tp8jc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.662967 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.663034 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp8jc\" (UniqueName: \"kubernetes.io/projected/177f7165-d126-4742-a778-e5e845c54fab-kube-api-access-tp8jc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.663900 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.698116 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp8jc\" (UniqueName: \"kubernetes.io/projected/177f7165-d126-4742-a778-e5e845c54fab-kube-api-access-tp8jc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.705442 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"177f7165-d126-4742-a778-e5e845c54fab\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:25 crc kubenswrapper[4721]: I0130 22:20:25.732560 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 30 22:20:26 crc kubenswrapper[4721]: I0130 22:20:26.299355 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 30 22:20:26 crc kubenswrapper[4721]: I0130 22:20:26.303508 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 22:20:26 crc kubenswrapper[4721]: I0130 22:20:26.884216 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"177f7165-d126-4742-a778-e5e845c54fab","Type":"ContainerStarted","Data":"14d4bf85c13e2e56bafe3f084f041d62ce2bf1d817d41b3b07bab3bd0a20be99"} Jan 30 22:20:27 crc kubenswrapper[4721]: I0130 22:20:27.895110 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"177f7165-d126-4742-a778-e5e845c54fab","Type":"ContainerStarted","Data":"4b04dbc9e0d14ddaa0316ac1226bb732678a6bf42716331fa5ae9c92470979df"} Jan 30 22:20:27 crc kubenswrapper[4721]: I0130 22:20:27.915315 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.734399948 podStartE2EDuration="2.915265975s" podCreationTimestamp="2026-01-30 22:20:25 +0000 UTC" firstStartedPulling="2026-01-30 22:20:26.303220034 +0000 UTC m=+3815.095121280" lastFinishedPulling="2026-01-30 22:20:27.484086071 +0000 UTC m=+3816.275987307" observedRunningTime="2026-01-30 22:20:27.910268569 +0000 UTC m=+3816.702169835" watchObservedRunningTime="2026-01-30 22:20:27.915265975 +0000 UTC m=+3816.707167221" Jan 30 22:20:33 crc kubenswrapper[4721]: I0130 22:20:33.092706 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:20:33 crc kubenswrapper[4721]: E0130 22:20:33.093635 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:20:48 crc kubenswrapper[4721]: I0130 22:20:48.093229 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:20:48 crc kubenswrapper[4721]: E0130 22:20:48.094009 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:20:56 crc 
kubenswrapper[4721]: I0130 22:20:56.850995 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9rfs8/must-gather-zjqmz"] Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.853229 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.855921 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9rfs8"/"default-dockercfg-57zbq" Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.855955 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9rfs8"/"openshift-service-ca.crt" Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.858753 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9rfs8"/"kube-root-ca.crt" Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.866202 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9rfs8/must-gather-zjqmz"] Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.920470 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnftb\" (UniqueName: \"kubernetes.io/projected/dd16370f-a0bb-4f72-8822-c4c87a37ba39-kube-api-access-pnftb\") pod \"must-gather-zjqmz\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:56 crc kubenswrapper[4721]: I0130 22:20:56.920608 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dd16370f-a0bb-4f72-8822-c4c87a37ba39-must-gather-output\") pod \"must-gather-zjqmz\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:57 crc kubenswrapper[4721]: I0130 22:20:57.023167 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnftb\" (UniqueName: \"kubernetes.io/projected/dd16370f-a0bb-4f72-8822-c4c87a37ba39-kube-api-access-pnftb\") pod \"must-gather-zjqmz\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:57 crc kubenswrapper[4721]: I0130 22:20:57.023260 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dd16370f-a0bb-4f72-8822-c4c87a37ba39-must-gather-output\") pod \"must-gather-zjqmz\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:57 crc kubenswrapper[4721]: I0130 22:20:57.023924 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dd16370f-a0bb-4f72-8822-c4c87a37ba39-must-gather-output\") pod \"must-gather-zjqmz\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:57 crc kubenswrapper[4721]: I0130 22:20:57.048785 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnftb\" (UniqueName: \"kubernetes.io/projected/dd16370f-a0bb-4f72-8822-c4c87a37ba39-kube-api-access-pnftb\") pod \"must-gather-zjqmz\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:57 crc kubenswrapper[4721]: I0130 22:20:57.172771 4721 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:20:57 crc kubenswrapper[4721]: I0130 22:20:57.779855 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9rfs8/must-gather-zjqmz"] Jan 30 22:20:58 crc kubenswrapper[4721]: I0130 22:20:58.183910 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" event={"ID":"dd16370f-a0bb-4f72-8822-c4c87a37ba39","Type":"ContainerStarted","Data":"067ca1546fa744b26c6482558c06da39f732d00acd76d0e1fec87a569b87cd8d"} Jan 30 22:21:01 crc kubenswrapper[4721]: I0130 22:21:01.092188 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:21:01 crc kubenswrapper[4721]: E0130 22:21:01.093008 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:21:02 crc kubenswrapper[4721]: I0130 22:21:02.222088 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" event={"ID":"dd16370f-a0bb-4f72-8822-c4c87a37ba39","Type":"ContainerStarted","Data":"ad4dad7c8fd5d8cc04f3e7f1178e0fe6e87d38da599dfb4dc7e1c56c62f179fc"} Jan 30 22:21:02 crc kubenswrapper[4721]: I0130 22:21:02.222592 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" event={"ID":"dd16370f-a0bb-4f72-8822-c4c87a37ba39","Type":"ContainerStarted","Data":"0de58a4330234414fad2e6b9cebb6610dd420b8e8af1b12593fc1835a00cbcd5"} Jan 30 22:21:02 crc kubenswrapper[4721]: I0130 22:21:02.243570 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" podStartSLOduration=2.457809516 podStartE2EDuration="6.243548952s" podCreationTimestamp="2026-01-30 22:20:56 +0000 UTC" firstStartedPulling="2026-01-30 22:20:57.785027883 +0000 UTC m=+3846.576929129" lastFinishedPulling="2026-01-30 22:21:01.570767319 +0000 UTC m=+3850.362668565" observedRunningTime="2026-01-30 22:21:02.240652182 +0000 UTC m=+3851.032553428" watchObservedRunningTime="2026-01-30 22:21:02.243548952 +0000 UTC m=+3851.035450198" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.106098 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-zbvl4"] Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.108089 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.253494 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/83d7f55f-b62e-4f29-9b14-9e6cddb27967-host\") pod \"crc-debug-zbvl4\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.254582 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2qv2\" (UniqueName: \"kubernetes.io/projected/83d7f55f-b62e-4f29-9b14-9e6cddb27967-kube-api-access-d2qv2\") pod \"crc-debug-zbvl4\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.358169 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/83d7f55f-b62e-4f29-9b14-9e6cddb27967-host\") pod \"crc-debug-zbvl4\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.358312 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/83d7f55f-b62e-4f29-9b14-9e6cddb27967-host\") pod \"crc-debug-zbvl4\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.358374 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2qv2\" (UniqueName: \"kubernetes.io/projected/83d7f55f-b62e-4f29-9b14-9e6cddb27967-kube-api-access-d2qv2\") pod \"crc-debug-zbvl4\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.378432 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2qv2\" (UniqueName: \"kubernetes.io/projected/83d7f55f-b62e-4f29-9b14-9e6cddb27967-kube-api-access-d2qv2\") pod \"crc-debug-zbvl4\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:06 crc kubenswrapper[4721]: I0130 22:21:06.429132 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:21:07 crc kubenswrapper[4721]: I0130 22:21:07.280822 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" event={"ID":"83d7f55f-b62e-4f29-9b14-9e6cddb27967","Type":"ContainerStarted","Data":"4c511bfbfcc708253f2fa4f390dcf89ac8294ddbec08ce160dbea4bae07363e1"} Jan 30 22:21:15 crc kubenswrapper[4721]: I0130 22:21:15.093045 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:21:15 crc kubenswrapper[4721]: E0130 22:21:15.093875 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:21:19 crc kubenswrapper[4721]: I0130 22:21:19.429256 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" event={"ID":"83d7f55f-b62e-4f29-9b14-9e6cddb27967","Type":"ContainerStarted","Data":"1e4b9dcc635b9ee05ceff244e7e9458005ded128ebae31fcf1d450e80f3adf47"} Jan 30 22:21:19 crc kubenswrapper[4721]: I0130 22:21:19.450495 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" podStartSLOduration=1.317531474 podStartE2EDuration="13.450472245s" podCreationTimestamp="2026-01-30 22:21:06 +0000 UTC" firstStartedPulling="2026-01-30 22:21:06.473891321 +0000 UTC m=+3855.265792557" lastFinishedPulling="2026-01-30 22:21:18.606832082 +0000 UTC m=+3867.398733328" observedRunningTime="2026-01-30 22:21:19.442220698 +0000 UTC m=+3868.234121944" watchObservedRunningTime="2026-01-30 22:21:19.450472245 +0000 UTC m=+3868.242373501" Jan 30 22:21:30 crc kubenswrapper[4721]: I0130 22:21:30.093395 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:21:30 crc kubenswrapper[4721]: E0130 22:21:30.094404 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:21:42 crc kubenswrapper[4721]: I0130 22:21:42.100965 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:21:42 crc kubenswrapper[4721]: E0130 22:21:42.101762 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:21:56 crc kubenswrapper[4721]: I0130 22:21:56.092615 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 
22:21:56 crc kubenswrapper[4721]: E0130 22:21:56.094697 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:22:07 crc kubenswrapper[4721]: I0130 22:22:07.093000 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f" Jan 30 22:22:08 crc kubenswrapper[4721]: I0130 22:22:08.046444 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"7d2c93101ebb8dac6591210948bfb5b7aee51db0b6dbec2a95e6fcfd00e77c60"} Jan 30 22:22:25 crc kubenswrapper[4721]: I0130 22:22:25.195510 4721 generic.go:334] "Generic (PLEG): container finished" podID="83d7f55f-b62e-4f29-9b14-9e6cddb27967" containerID="1e4b9dcc635b9ee05ceff244e7e9458005ded128ebae31fcf1d450e80f3adf47" exitCode=0 Jan 30 22:22:25 crc kubenswrapper[4721]: I0130 22:22:25.195603 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" event={"ID":"83d7f55f-b62e-4f29-9b14-9e6cddb27967","Type":"ContainerDied","Data":"1e4b9dcc635b9ee05ceff244e7e9458005ded128ebae31fcf1d450e80f3adf47"} Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.315796 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.353096 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-zbvl4"] Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.361587 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-zbvl4"] Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.418356 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2qv2\" (UniqueName: \"kubernetes.io/projected/83d7f55f-b62e-4f29-9b14-9e6cddb27967-kube-api-access-d2qv2\") pod \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.418586 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/83d7f55f-b62e-4f29-9b14-9e6cddb27967-host\") pod \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\" (UID: \"83d7f55f-b62e-4f29-9b14-9e6cddb27967\") " Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.418729 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83d7f55f-b62e-4f29-9b14-9e6cddb27967-host" (OuterVolumeSpecName: "host") pod "83d7f55f-b62e-4f29-9b14-9e6cddb27967" (UID: "83d7f55f-b62e-4f29-9b14-9e6cddb27967"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.419150 4721 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/83d7f55f-b62e-4f29-9b14-9e6cddb27967-host\") on node \"crc\" DevicePath \"\"" Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.427580 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83d7f55f-b62e-4f29-9b14-9e6cddb27967-kube-api-access-d2qv2" (OuterVolumeSpecName: "kube-api-access-d2qv2") pod "83d7f55f-b62e-4f29-9b14-9e6cddb27967" (UID: "83d7f55f-b62e-4f29-9b14-9e6cddb27967"). InnerVolumeSpecName "kube-api-access-d2qv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:22:26 crc kubenswrapper[4721]: I0130 22:22:26.521106 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2qv2\" (UniqueName: \"kubernetes.io/projected/83d7f55f-b62e-4f29-9b14-9e6cddb27967-kube-api-access-d2qv2\") on node \"crc\" DevicePath \"\"" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.218185 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c511bfbfcc708253f2fa4f390dcf89ac8294ddbec08ce160dbea4bae07363e1" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.218249 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-zbvl4" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.511051 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-fjgt2"] Jan 30 22:22:27 crc kubenswrapper[4721]: E0130 22:22:27.511570 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d7f55f-b62e-4f29-9b14-9e6cddb27967" containerName="container-00" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.511586 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d7f55f-b62e-4f29-9b14-9e6cddb27967" containerName="container-00" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.511818 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d7f55f-b62e-4f29-9b14-9e6cddb27967" containerName="container-00" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.512664 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.642936 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv692\" (UniqueName: \"kubernetes.io/projected/294dd18a-a98f-4077-9fff-c93227da9c2d-kube-api-access-pv692\") pod \"crc-debug-fjgt2\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.643201 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/294dd18a-a98f-4077-9fff-c93227da9c2d-host\") pod \"crc-debug-fjgt2\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.745852 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv692\" (UniqueName: \"kubernetes.io/projected/294dd18a-a98f-4077-9fff-c93227da9c2d-kube-api-access-pv692\") pod \"crc-debug-fjgt2\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.746003 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/294dd18a-a98f-4077-9fff-c93227da9c2d-host\") pod \"crc-debug-fjgt2\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.746124 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/294dd18a-a98f-4077-9fff-c93227da9c2d-host\") pod \"crc-debug-fjgt2\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.765174 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv692\" (UniqueName: \"kubernetes.io/projected/294dd18a-a98f-4077-9fff-c93227da9c2d-kube-api-access-pv692\") pod \"crc-debug-fjgt2\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: I0130 22:22:27.828627 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:27 crc kubenswrapper[4721]: W0130 22:22:27.865091 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod294dd18a_a98f_4077_9fff_c93227da9c2d.slice/crio-888b1715c134861ffc0fd3041845639f6fbbf8bd6365d4c21752eac1cce1f929 WatchSource:0}: Error finding container 888b1715c134861ffc0fd3041845639f6fbbf8bd6365d4c21752eac1cce1f929: Status 404 returned error can't find the container with id 888b1715c134861ffc0fd3041845639f6fbbf8bd6365d4c21752eac1cce1f929 Jan 30 22:22:28 crc kubenswrapper[4721]: I0130 22:22:28.106577 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83d7f55f-b62e-4f29-9b14-9e6cddb27967" path="/var/lib/kubelet/pods/83d7f55f-b62e-4f29-9b14-9e6cddb27967/volumes" Jan 30 22:22:28 crc kubenswrapper[4721]: I0130 22:22:28.229782 4721 generic.go:334] "Generic (PLEG): container finished" podID="294dd18a-a98f-4077-9fff-c93227da9c2d" containerID="b774be35368fdee626339c1be1986c459d8c9f51b5a4592bef081eb92ed37df4" exitCode=0 Jan 30 22:22:28 crc kubenswrapper[4721]: I0130 22:22:28.229830 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" event={"ID":"294dd18a-a98f-4077-9fff-c93227da9c2d","Type":"ContainerDied","Data":"b774be35368fdee626339c1be1986c459d8c9f51b5a4592bef081eb92ed37df4"} Jan 30 22:22:28 crc kubenswrapper[4721]: I0130 22:22:28.229864 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" event={"ID":"294dd18a-a98f-4077-9fff-c93227da9c2d","Type":"ContainerStarted","Data":"888b1715c134861ffc0fd3041845639f6fbbf8bd6365d4c21752eac1cce1f929"} Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.342779 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.477403 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/294dd18a-a98f-4077-9fff-c93227da9c2d-host\") pod \"294dd18a-a98f-4077-9fff-c93227da9c2d\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.477781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv692\" (UniqueName: \"kubernetes.io/projected/294dd18a-a98f-4077-9fff-c93227da9c2d-kube-api-access-pv692\") pod \"294dd18a-a98f-4077-9fff-c93227da9c2d\" (UID: \"294dd18a-a98f-4077-9fff-c93227da9c2d\") " Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.479811 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/294dd18a-a98f-4077-9fff-c93227da9c2d-host" (OuterVolumeSpecName: "host") pod "294dd18a-a98f-4077-9fff-c93227da9c2d" (UID: "294dd18a-a98f-4077-9fff-c93227da9c2d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.493472 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/294dd18a-a98f-4077-9fff-c93227da9c2d-kube-api-access-pv692" (OuterVolumeSpecName: "kube-api-access-pv692") pod "294dd18a-a98f-4077-9fff-c93227da9c2d" (UID: "294dd18a-a98f-4077-9fff-c93227da9c2d"). InnerVolumeSpecName "kube-api-access-pv692". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.579807 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv692\" (UniqueName: \"kubernetes.io/projected/294dd18a-a98f-4077-9fff-c93227da9c2d-kube-api-access-pv692\") on node \"crc\" DevicePath \"\"" Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.579844 4721 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/294dd18a-a98f-4077-9fff-c93227da9c2d-host\") on node \"crc\" DevicePath \"\"" Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.776794 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-fjgt2"] Jan 30 22:22:29 crc kubenswrapper[4721]: I0130 22:22:29.787366 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-fjgt2"] Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.105558 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="294dd18a-a98f-4077-9fff-c93227da9c2d" path="/var/lib/kubelet/pods/294dd18a-a98f-4077-9fff-c93227da9c2d/volumes" Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.260662 4721 scope.go:117] "RemoveContainer" containerID="b774be35368fdee626339c1be1986c459d8c9f51b5a4592bef081eb92ed37df4" Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.260802 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-fjgt2" Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.953054 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-l24lq"] Jan 30 22:22:30 crc kubenswrapper[4721]: E0130 22:22:30.953699 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="294dd18a-a98f-4077-9fff-c93227da9c2d" containerName="container-00" Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.953712 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="294dd18a-a98f-4077-9fff-c93227da9c2d" containerName="container-00" Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.953912 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="294dd18a-a98f-4077-9fff-c93227da9c2d" containerName="container-00" Jan 30 22:22:30 crc kubenswrapper[4721]: I0130 22:22:30.954558 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.006804 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6zwm\" (UniqueName: \"kubernetes.io/projected/bb54a665-ac45-4574-a61a-c00c37495879-kube-api-access-h6zwm\") pod \"crc-debug-l24lq\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.007008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb54a665-ac45-4574-a61a-c00c37495879-host\") pod \"crc-debug-l24lq\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.109270 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6zwm\" (UniqueName: \"kubernetes.io/projected/bb54a665-ac45-4574-a61a-c00c37495879-kube-api-access-h6zwm\") pod \"crc-debug-l24lq\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.109498 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb54a665-ac45-4574-a61a-c00c37495879-host\") pod \"crc-debug-l24lq\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.109625 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb54a665-ac45-4574-a61a-c00c37495879-host\") pod \"crc-debug-l24lq\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.132426 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6zwm\" (UniqueName: \"kubernetes.io/projected/bb54a665-ac45-4574-a61a-c00c37495879-kube-api-access-h6zwm\") pod \"crc-debug-l24lq\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: I0130 22:22:31.273967 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:31 crc kubenswrapper[4721]: W0130 22:22:31.300744 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb54a665_ac45_4574_a61a_c00c37495879.slice/crio-cd14832830831e217c9865c06b61c81e3b8241296765d8abf93dcbcfcce0803a WatchSource:0}: Error finding container cd14832830831e217c9865c06b61c81e3b8241296765d8abf93dcbcfcce0803a: Status 404 returned error can't find the container with id cd14832830831e217c9865c06b61c81e3b8241296765d8abf93dcbcfcce0803a Jan 30 22:22:32 crc kubenswrapper[4721]: I0130 22:22:32.281003 4721 generic.go:334] "Generic (PLEG): container finished" podID="bb54a665-ac45-4574-a61a-c00c37495879" containerID="95a6efbb5adf35081aa14a547500fde3396bf631a20bb508094197e37d57b02a" exitCode=0 Jan 30 22:22:32 crc kubenswrapper[4721]: I0130 22:22:32.281090 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-l24lq" event={"ID":"bb54a665-ac45-4574-a61a-c00c37495879","Type":"ContainerDied","Data":"95a6efbb5adf35081aa14a547500fde3396bf631a20bb508094197e37d57b02a"} Jan 30 22:22:32 crc kubenswrapper[4721]: I0130 22:22:32.281379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/crc-debug-l24lq" event={"ID":"bb54a665-ac45-4574-a61a-c00c37495879","Type":"ContainerStarted","Data":"cd14832830831e217c9865c06b61c81e3b8241296765d8abf93dcbcfcce0803a"} Jan 30 22:22:32 crc kubenswrapper[4721]: I0130 22:22:32.330249 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-l24lq"] Jan 30 22:22:32 crc kubenswrapper[4721]: I0130 22:22:32.339038 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9rfs8/crc-debug-l24lq"] Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.426311 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.459412 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb54a665-ac45-4574-a61a-c00c37495879-host\") pod \"bb54a665-ac45-4574-a61a-c00c37495879\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.459523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bb54a665-ac45-4574-a61a-c00c37495879-host" (OuterVolumeSpecName: "host") pod "bb54a665-ac45-4574-a61a-c00c37495879" (UID: "bb54a665-ac45-4574-a61a-c00c37495879"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.459579 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6zwm\" (UniqueName: \"kubernetes.io/projected/bb54a665-ac45-4574-a61a-c00c37495879-kube-api-access-h6zwm\") pod \"bb54a665-ac45-4574-a61a-c00c37495879\" (UID: \"bb54a665-ac45-4574-a61a-c00c37495879\") " Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.460374 4721 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb54a665-ac45-4574-a61a-c00c37495879-host\") on node \"crc\" DevicePath \"\"" Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.467697 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb54a665-ac45-4574-a61a-c00c37495879-kube-api-access-h6zwm" (OuterVolumeSpecName: "kube-api-access-h6zwm") pod "bb54a665-ac45-4574-a61a-c00c37495879" (UID: "bb54a665-ac45-4574-a61a-c00c37495879"). InnerVolumeSpecName "kube-api-access-h6zwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:22:33 crc kubenswrapper[4721]: I0130 22:22:33.562471 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6zwm\" (UniqueName: \"kubernetes.io/projected/bb54a665-ac45-4574-a61a-c00c37495879-kube-api-access-h6zwm\") on node \"crc\" DevicePath \"\"" Jan 30 22:22:34 crc kubenswrapper[4721]: I0130 22:22:34.103191 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb54a665-ac45-4574-a61a-c00c37495879" path="/var/lib/kubelet/pods/bb54a665-ac45-4574-a61a-c00c37495879/volumes" Jan 30 22:22:34 crc kubenswrapper[4721]: I0130 22:22:34.301380 4721 scope.go:117] "RemoveContainer" containerID="95a6efbb5adf35081aa14a547500fde3396bf631a20bb508094197e37d57b02a" Jan 30 22:22:34 crc kubenswrapper[4721]: I0130 22:22:34.301413 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9rfs8/crc-debug-l24lq" Jan 30 22:23:00 crc kubenswrapper[4721]: I0130 22:23:00.987089 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/init-config-reloader/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.216383 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/alertmanager/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.222945 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/init-config-reloader/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.239597 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/config-reloader/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.454503 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-68b6df574b-x89tq_e418bacc-47a2-45cd-9bb3-35e42563c482/barbican-api/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.568149 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-68b6df574b-x89tq_e418bacc-47a2-45cd-9bb3-35e42563c482/barbican-api-log/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.580762 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6db69d5bd6-grvs7_925d81d0-2e07-4a27-a8d1-7edff62fe070/barbican-keystone-listener/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.833668 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6db69d5bd6-grvs7_925d81d0-2e07-4a27-a8d1-7edff62fe070/barbican-keystone-listener-log/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.850162 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6456f55dbc-h7p5b_7457bf9f-e5dd-47af-9346-898a62273a3a/barbican-worker-log/0.log" Jan 30 22:23:01 crc kubenswrapper[4721]: I0130 22:23:01.851911 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6456f55dbc-h7p5b_7457bf9f-e5dd-47af-9346-898a62273a3a/barbican-worker/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.073145 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr_0093f639-dd37-4e8d-86da-c6149cb3a4c4/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.182807 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/ceilometer-central-agent/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.305015 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/ceilometer-notification-agent/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.335281 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/proxy-httpd/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.413164 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/sg-core/0.log" Jan 30 
22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.627347 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e690f08-a69f-4b8a-9698-f66afbf94f43/cinder-api-log/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.633649 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e690f08-a69f-4b8a-9698-f66afbf94f43/cinder-api/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.764723 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6b2e94bb-fd95-448e-8ab0-b79d741fd7f5/cinder-scheduler/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.865084 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6b2e94bb-fd95-448e-8ab0-b79d741fd7f5/probe/0.log" Jan 30 22:23:02 crc kubenswrapper[4721]: I0130 22:23:02.990713 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_41460120-522d-44cf-a772-29cb623f9c14/cloudkitty-api-log/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.042103 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_41460120-522d-44cf-a772-29cb623f9c14/cloudkitty-api/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.249571 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_1a3aadfd-1e26-407f-98a8-c3f5681c2126/loki-compactor/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.254284 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-66dfd9bb-s89nx_f7847558-f6b6-4f0a-8fd3-45e0fef7fce4/loki-distributor/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.444967 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-7db4f4db8c-2plvn_9d09968c-71f8-4bad-855b-ebc5abb78989/gateway/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.540693 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz_a6518545-fdf0-4445-8e62-d7ca4816779d/gateway/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.835689 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_ae53bce6-479b-4d55-9fb5-2441850bec4a/loki-index-gateway/0.log" Jan 30 22:23:03 crc kubenswrapper[4721]: I0130 22:23:03.886397 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3/loki-ingester/0.log" Jan 30 22:23:04 crc kubenswrapper[4721]: I0130 22:23:04.137422 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r_6e77f4bd-bf5d-4043-ae9e-e938a4e99b69/loki-query-frontend/0.log" Jan 30 22:23:04 crc kubenswrapper[4721]: I0130 22:23:04.345719 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-795fd8f8cc-csn7z_3b976659-d481-4cd4-b1b1-72a7d465067d/loki-querier/0.log" Jan 30 22:23:04 crc kubenswrapper[4721]: I0130 22:23:04.665127 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj_48a8b210-aaab-46b3-8436-f4acab16a60a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:04 crc kubenswrapper[4721]: I0130 22:23:04.855076 4721 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk_aeb46845-60c0-48ae-960e-4f138a1caf5e/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:05 crc kubenswrapper[4721]: I0130 22:23:05.576564 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-4gj7m_94aad825-d42e-410a-a415-ff4e1910ecdb/init/0.log" Jan 30 22:23:05 crc kubenswrapper[4721]: I0130 22:23:05.885767 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-4gj7m_94aad825-d42e-410a-a415-ff4e1910ecdb/dnsmasq-dns/0.log" Jan 30 22:23:05 crc kubenswrapper[4721]: I0130 22:23:05.915324 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-4gj7m_94aad825-d42e-410a-a415-ff4e1910ecdb/init/0.log" Jan 30 22:23:05 crc kubenswrapper[4721]: I0130 22:23:05.988433 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6_ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:06 crc kubenswrapper[4721]: I0130 22:23:06.214241 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_acb7c332-79bc-432b-b046-248772221388/glance-log/0.log" Jan 30 22:23:06 crc kubenswrapper[4721]: I0130 22:23:06.228077 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_acb7c332-79bc-432b-b046-248772221388/glance-httpd/0.log" Jan 30 22:23:06 crc kubenswrapper[4721]: I0130 22:23:06.438770 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9/glance-log/0.log" Jan 30 22:23:06 crc kubenswrapper[4721]: I0130 22:23:06.454113 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9/glance-httpd/0.log" Jan 30 22:23:06 crc kubenswrapper[4721]: I0130 22:23:06.663247 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr_f2c051d1-6a5d-4950-953d-204cc6adfc6e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:06 crc kubenswrapper[4721]: I0130 22:23:06.746086 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-p4zrm_d1bfbef9-b785-4687-a0fa-471a6b4b6957/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:07 crc kubenswrapper[4721]: I0130 22:23:07.399627 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29496841-6wgsv_33f22249-bfaa-4818-a56c-2d0192a8bef6/keystone-cron/0.log" Jan 30 22:23:07 crc kubenswrapper[4721]: I0130 22:23:07.589922 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-54b7957dff-42xvx_e94a796b-4938-444f-811d-dbba68141f41/keystone-api/0.log" Jan 30 22:23:07 crc kubenswrapper[4721]: I0130 22:23:07.599062 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_b8e82608-6b61-42c4-b4fc-6f1fe545e119/kube-state-metrics/0.log" Jan 30 22:23:07 crc kubenswrapper[4721]: I0130 22:23:07.828676 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4_75dd1a6a-3fe9-4016-bdb1-bbc9ec572417/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:08 crc 
kubenswrapper[4721]: I0130 22:23:08.182653 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7cccf5fc8f-zbdml_96081b17-acc8-4700-91da-9a966b7e7f1c/neutron-httpd/0.log" Jan 30 22:23:08 crc kubenswrapper[4721]: I0130 22:23:08.372027 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7cccf5fc8f-zbdml_96081b17-acc8-4700-91da-9a966b7e7f1c/neutron-api/0.log" Jan 30 22:23:08 crc kubenswrapper[4721]: I0130 22:23:08.491469 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k_433d1a2c-a03e-483a-9dba-2adde950cf1f/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.021370 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_03dc15ed-69f7-4a98-b586-a7e051ba2bbe/nova-api-log/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.236351 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_03dc15ed-69f7-4a98-b586-a7e051ba2bbe/nova-api-api/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.380695 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c01ff72c-19ac-4ae7-8c07-d9e0c01c669f/nova-cell0-conductor-conductor/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.583133 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_0a596943-21b2-4c3d-9687-150ce3bde8f7/cloudkitty-proc/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.638201 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_0ca8e7a1-e433-4c9a-9532-f695fedd853e/nova-cell1-conductor-conductor/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.706149 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_55612f9b-a463-4acf-9f8a-647372b6c4a0/nova-cell1-novncproxy-novncproxy/0.log" Jan 30 22:23:09 crc kubenswrapper[4721]: I0130 22:23:09.871522 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-g4xdp_e148581e-1ed2-4532-a179-f1491d58dc0e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:10 crc kubenswrapper[4721]: I0130 22:23:10.119845 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_766e6806-c4e1-4db9-9c4e-93a466d182f1/nova-metadata-log/0.log" Jan 30 22:23:10 crc kubenswrapper[4721]: I0130 22:23:10.344655 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_2e8d2389-e04d-4427-8e1e-ef0d8617a29f/nova-scheduler-scheduler/0.log" Jan 30 22:23:10 crc kubenswrapper[4721]: I0130 22:23:10.413556 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8bff96ff-2424-4622-8c4d-d866a4b28b21/mysql-bootstrap/0.log" Jan 30 22:23:10 crc kubenswrapper[4721]: I0130 22:23:10.693827 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8bff96ff-2424-4622-8c4d-d866a4b28b21/galera/0.log" Jan 30 22:23:10 crc kubenswrapper[4721]: I0130 22:23:10.696133 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8bff96ff-2424-4622-8c4d-d866a4b28b21/mysql-bootstrap/0.log" Jan 30 22:23:10 crc kubenswrapper[4721]: I0130 22:23:10.893976 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_96303720-27c1-495f-8597-5891c08c5e06/mysql-bootstrap/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.115169 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_766e6806-c4e1-4db9-9c4e-93a466d182f1/nova-metadata-metadata/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.184758 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_96303720-27c1-495f-8597-5891c08c5e06/mysql-bootstrap/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.222872 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_96303720-27c1-495f-8597-5891c08c5e06/galera/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.376230 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_fc69e094-e84a-44d5-9a2c-726bac11b1c2/openstackclient/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.509037 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4k958_522b5333-a647-446e-a261-b1828a1d20a3/ovn-controller/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.670466 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-fmzsr_133e2a95-0b74-4b44-9ea1-d6a37d548876/openstack-network-exporter/0.log" Jan 30 22:23:11 crc kubenswrapper[4721]: I0130 22:23:11.819629 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovsdb-server-init/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.020238 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovsdb-server-init/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.103785 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovs-vswitchd/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.119526 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovsdb-server/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.296894 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qbsgd_0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.354535 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cbab3069-54ee-4146-b912-5e59c0039f86/openstack-network-exporter/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.392109 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cbab3069-54ee-4146-b912-5e59c0039f86/ovn-northd/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.575617 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_63a5898f-2b47-44bb-85a0-1700940899c1/openstack-network-exporter/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.595417 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_63a5898f-2b47-44bb-85a0-1700940899c1/ovsdbserver-nb/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.824322 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_796cb9cb-aad7-4645-89ae-ae8764bfbe17/openstack-network-exporter/0.log" Jan 30 22:23:12 crc kubenswrapper[4721]: I0130 22:23:12.895362 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_796cb9cb-aad7-4645-89ae-ae8764bfbe17/ovsdbserver-sb/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.120163 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5c759f49d6-k22ln_9def88bd-3017-4ea9-8a12-b895aeb4b28f/placement-api/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.186511 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5c759f49d6-k22ln_9def88bd-3017-4ea9-8a12-b895aeb4b28f/placement-log/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.269625 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/init-config-reloader/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.469971 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/init-config-reloader/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.484535 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/config-reloader/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.544059 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/prometheus/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.586074 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/thanos-sidecar/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.753322 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_12f1cce2-7b07-4519-b1c3-15e57ed44cde/setup-container/0.log" Jan 30 22:23:13 crc kubenswrapper[4721]: I0130 22:23:13.923065 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_12f1cce2-7b07-4519-b1c3-15e57ed44cde/setup-container/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.015575 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7ccec6ec-8034-4a0f-88a6-b86751e0f22b/setup-container/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.064868 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_12f1cce2-7b07-4519-b1c3-15e57ed44cde/rabbitmq/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.385214 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7ccec6ec-8034-4a0f-88a6-b86751e0f22b/rabbitmq/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.470931 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk_ffae7484-d197-4caa-8553-151666fded73/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.474895 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7ccec6ec-8034-4a0f-88a6-b86751e0f22b/setup-container/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.579058 4721 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-lf29q_97ad0a5f-02dd-48d9-93a4-b7c7b9462879/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.731482 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2_afddf697-9175-4a72-8226-bcb7030604f9/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.902593 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-jt6rq_982c1f39-3c88-4f1b-a5ea-4db039e1201e/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:14 crc kubenswrapper[4721]: I0130 22:23:14.997091 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-4989x_c878af65-d3fd-4eae-9818-a30e27c363ec/ssh-known-hosts-edpm-deployment/0.log" Jan 30 22:23:15 crc kubenswrapper[4721]: I0130 22:23:15.783617 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-746946b9f5-f7fdd_46e77d9a-8263-4821-be29-a13929dd4448/proxy-server/0.log" Jan 30 22:23:15 crc kubenswrapper[4721]: I0130 22:23:15.873584 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-746946b9f5-f7fdd_46e77d9a-8263-4821-be29-a13929dd4448/proxy-httpd/0.log" Jan 30 22:23:15 crc kubenswrapper[4721]: I0130 22:23:15.967119 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hkfr7_81c15104-7d30-43d8-9e3d-9ab1834959da/swift-ring-rebalance/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.047955 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-auditor/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.096037 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-reaper/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.228673 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-replicator/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.317190 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-auditor/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.326074 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-server/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.528282 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-server/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.528841 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-replicator/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.557084 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-updater/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.667656 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-auditor/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.790880 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-replicator/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.798492 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-expirer/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.821947 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-server/0.log" Jan 30 22:23:16 crc kubenswrapper[4721]: I0130 22:23:16.882258 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-updater/0.log" Jan 30 22:23:17 crc kubenswrapper[4721]: I0130 22:23:17.441314 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/rsync/0.log" Jan 30 22:23:17 crc kubenswrapper[4721]: I0130 22:23:17.518600 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/swift-recon-cron/0.log" Jan 30 22:23:17 crc kubenswrapper[4721]: I0130 22:23:17.552542 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj_bb52513c-6253-41f2-aa93-808d6b9cbb62/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:17 crc kubenswrapper[4721]: I0130 22:23:17.846078 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_177f7165-d126-4742-a778-e5e845c54fab/test-operator-logs-container/0.log" Jan 30 22:23:17 crc kubenswrapper[4721]: I0130 22:23:17.848083 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_519f6e4a-b90c-4146-8ac9-d03854442bdd/tempest-tests-tempest-tests-runner/0.log" Jan 30 22:23:18 crc kubenswrapper[4721]: I0130 22:23:18.011094 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-5np9m_8a1c37f6-c659-4344-ad91-49f56d8fd843/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:23:22 crc kubenswrapper[4721]: I0130 22:23:22.421062 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7548d1d0-371f-4bf8-a557-a9734c49a52e/memcached/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.157741 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/util/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.333420 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/util/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.368466 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/pull/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.379720 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/pull/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.562823 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/pull/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.605899 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/extract/0.log" Jan 30 22:23:46 crc kubenswrapper[4721]: I0130 22:23:46.637623 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/util/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.151915 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-qhpvg_ce8df3e5-ac5d-4782-97fe-b49e9342768a/manager/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.232742 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-ddmz2_6dfaa0a8-aa69-4d52-8740-b1098802644c/manager/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.435134 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-77cms_e5aed1e3-eebf-4e1b-ab1b-1b81b337374e/manager/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.564323 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-snvcj_2dbdf4c9-4962-45ea-ac32-adbb848529d7/manager/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.656398 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-fvxk2_6a148cdc-0a77-4f57-b5e6-1b2acf90a900/manager/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.792076 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-m79fw_b6d746e4-3768-42df-956a-c700072e4e4c/manager/0.log" Jan 30 22:23:47 crc kubenswrapper[4721]: I0130 22:23:47.981444 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-mbz59_782f1962-bc39-4162-84ae-acad49911f45/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.124063 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-r42gp_b10d3fdb-7237-4461-ba03-ed926092791f/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.225799 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-jztwj_7d5fad49-066d-48d6-a9f0-0c3a105df525/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.339504 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-scnz2_64d5e6e7-b654-4060-9ba5-82e52e172a3b/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.514046 4721 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-6qxsd_e7dd1fd7-f720-45b2-86b4-bc056b1ef360/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.658860 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-j5m4f_319096ad-d67b-4344-8bb2-290aafd57bc0/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.818329 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-qhqfv_3d3b8ade-729b-4dfc-9ae8-ead1999f9657/manager/0.log" Jan 30 22:23:48 crc kubenswrapper[4721]: I0130 22:23:48.913195 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-2cmth_c53f8f24-7f92-4255-ad09-8a729b4159ab/manager/0.log" Jan 30 22:23:49 crc kubenswrapper[4721]: I0130 22:23:49.025708 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf_34543b19-ae6c-4a39-ad40-0dff196f0fd6/manager/0.log" Jan 30 22:23:49 crc kubenswrapper[4721]: I0130 22:23:49.265097 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-8584c7c99f-224n7_16ce971e-b375-4472-bbf9-6310b8524952/operator/0.log" Jan 30 22:23:49 crc kubenswrapper[4721]: I0130 22:23:49.571879 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-hvf87_c9bf578a-adbc-4168-a09b-edf084023bfa/registry-server/0.log" Jan 30 22:23:49 crc kubenswrapper[4721]: I0130 22:23:49.713123 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-x44jj_5be9ffe8-a1a4-4aa1-a704-5443e1ef640b/manager/0.log" Jan 30 22:23:49 crc kubenswrapper[4721]: I0130 22:23:49.872863 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-kgpcd_d4433a61-fd64-4240-8a12-8d86a8a52e77/manager/0.log" Jan 30 22:23:50 crc kubenswrapper[4721]: I0130 22:23:50.089378 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4r7sw_bca228e2-5d0f-415b-943b-530f9291396a/operator/0.log" Jan 30 22:23:50 crc kubenswrapper[4721]: I0130 22:23:50.143287 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-6ptfc_d9650011-7842-4b52-bf3b-728e40294cb4/manager/0.log" Jan 30 22:23:50 crc kubenswrapper[4721]: I0130 22:23:50.601515 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-jktzs_6e34b8b8-6fc5-4444-b957-b6325671ec2a/manager/0.log" Jan 30 22:23:50 crc kubenswrapper[4721]: I0130 22:23:50.646175 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-c2c8k_e11d1820-45a9-4ecc-b400-7bbcb6f8b69e/manager/0.log" Jan 30 22:23:50 crc kubenswrapper[4721]: I0130 22:23:50.871361 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-57c48854c9-4r8wb_1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c/manager/0.log" Jan 30 22:23:50 crc kubenswrapper[4721]: I0130 22:23:50.964352 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-fd77b8dd7-4t9rw_54f2f57d-0269-4ba8-94f5-04873f29e16c/manager/0.log" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.310646 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2fc"] Jan 30 22:23:53 crc kubenswrapper[4721]: E0130 22:23:53.315026 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb54a665-ac45-4574-a61a-c00c37495879" containerName="container-00" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.315055 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb54a665-ac45-4574-a61a-c00c37495879" containerName="container-00" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.315262 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb54a665-ac45-4574-a61a-c00c37495879" containerName="container-00" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.317025 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.324274 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2fc"] Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.408199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-utilities\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.408272 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8l6m\" (UniqueName: \"kubernetes.io/projected/07871f96-0eb5-4222-b564-88ef838abd1a-kube-api-access-c8l6m\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.408331 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-catalog-content\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.510371 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-utilities\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.510449 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8l6m\" (UniqueName: \"kubernetes.io/projected/07871f96-0eb5-4222-b564-88ef838abd1a-kube-api-access-c8l6m\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.510480 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-catalog-content\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.510984 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-utilities\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.511000 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-catalog-content\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.530286 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8l6m\" (UniqueName: \"kubernetes.io/projected/07871f96-0eb5-4222-b564-88ef838abd1a-kube-api-access-c8l6m\") pod \"redhat-marketplace-fg2fc\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:53 crc kubenswrapper[4721]: I0130 22:23:53.643546 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:23:54 crc kubenswrapper[4721]: I0130 22:23:54.194801 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2fc"] Jan 30 22:23:55 crc kubenswrapper[4721]: I0130 22:23:55.084818 4721 generic.go:334] "Generic (PLEG): container finished" podID="07871f96-0eb5-4222-b564-88ef838abd1a" containerID="e3ea3e15630803e4c2a143c8e0903618fe321bc8b51a16ef02aa15879fc8d8b4" exitCode=0 Jan 30 22:23:55 crc kubenswrapper[4721]: I0130 22:23:55.084897 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerDied","Data":"e3ea3e15630803e4c2a143c8e0903618fe321bc8b51a16ef02aa15879fc8d8b4"} Jan 30 22:23:55 crc kubenswrapper[4721]: I0130 22:23:55.085401 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerStarted","Data":"ad7b1a1d4578a1c2c9fbe9ce80c8e22b4328f5300147893d90679df7872aa20a"} Jan 30 22:23:56 crc kubenswrapper[4721]: I0130 22:23:56.102490 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerStarted","Data":"69aea1b267c7e0682629cd8901fa6e11a72ccc0e35faaf0c6e7452bc75cd0f5e"} Jan 30 22:23:58 crc kubenswrapper[4721]: I0130 22:23:58.115340 4721 generic.go:334] "Generic (PLEG): container finished" podID="07871f96-0eb5-4222-b564-88ef838abd1a" containerID="69aea1b267c7e0682629cd8901fa6e11a72ccc0e35faaf0c6e7452bc75cd0f5e" exitCode=0 Jan 30 22:23:58 crc kubenswrapper[4721]: I0130 22:23:58.115428 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerDied","Data":"69aea1b267c7e0682629cd8901fa6e11a72ccc0e35faaf0c6e7452bc75cd0f5e"} Jan 30 
22:23:59 crc kubenswrapper[4721]: I0130 22:23:59.127649 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerStarted","Data":"4374f6de8477b807f3e0dfa83233b3dcefc9c04646642d0d20382cd8d7487aae"} Jan 30 22:23:59 crc kubenswrapper[4721]: I0130 22:23:59.153902 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fg2fc" podStartSLOduration=2.5742301469999997 podStartE2EDuration="6.153883857s" podCreationTimestamp="2026-01-30 22:23:53 +0000 UTC" firstStartedPulling="2026-01-30 22:23:55.087578351 +0000 UTC m=+4023.879479597" lastFinishedPulling="2026-01-30 22:23:58.667232051 +0000 UTC m=+4027.459133307" observedRunningTime="2026-01-30 22:23:59.145074781 +0000 UTC m=+4027.936976027" watchObservedRunningTime="2026-01-30 22:23:59.153883857 +0000 UTC m=+4027.945785103" Jan 30 22:24:03 crc kubenswrapper[4721]: I0130 22:24:03.644234 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:24:03 crc kubenswrapper[4721]: I0130 22:24:03.644861 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:24:03 crc kubenswrapper[4721]: I0130 22:24:03.694095 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:24:04 crc kubenswrapper[4721]: I0130 22:24:04.225850 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:24:07 crc kubenswrapper[4721]: I0130 22:24:07.300880 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2fc"] Jan 30 22:24:07 crc kubenswrapper[4721]: I0130 22:24:07.301747 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fg2fc" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="registry-server" containerID="cri-o://4374f6de8477b807f3e0dfa83233b3dcefc9c04646642d0d20382cd8d7487aae" gracePeriod=2 Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.216965 4721 generic.go:334] "Generic (PLEG): container finished" podID="07871f96-0eb5-4222-b564-88ef838abd1a" containerID="4374f6de8477b807f3e0dfa83233b3dcefc9c04646642d0d20382cd8d7487aae" exitCode=0 Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.217041 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerDied","Data":"4374f6de8477b807f3e0dfa83233b3dcefc9c04646642d0d20382cd8d7487aae"} Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.696149 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.722892 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-catalog-content\") pod \"07871f96-0eb5-4222-b564-88ef838abd1a\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.722969 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-utilities\") pod \"07871f96-0eb5-4222-b564-88ef838abd1a\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.723124 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8l6m\" (UniqueName: \"kubernetes.io/projected/07871f96-0eb5-4222-b564-88ef838abd1a-kube-api-access-c8l6m\") pod \"07871f96-0eb5-4222-b564-88ef838abd1a\" (UID: \"07871f96-0eb5-4222-b564-88ef838abd1a\") " Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.724162 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-utilities" (OuterVolumeSpecName: "utilities") pod "07871f96-0eb5-4222-b564-88ef838abd1a" (UID: "07871f96-0eb5-4222-b564-88ef838abd1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.730397 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07871f96-0eb5-4222-b564-88ef838abd1a-kube-api-access-c8l6m" (OuterVolumeSpecName: "kube-api-access-c8l6m") pod "07871f96-0eb5-4222-b564-88ef838abd1a" (UID: "07871f96-0eb5-4222-b564-88ef838abd1a"). InnerVolumeSpecName "kube-api-access-c8l6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.745131 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07871f96-0eb5-4222-b564-88ef838abd1a" (UID: "07871f96-0eb5-4222-b564-88ef838abd1a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.825637 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.825903 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07871f96-0eb5-4222-b564-88ef838abd1a-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:24:08 crc kubenswrapper[4721]: I0130 22:24:08.825988 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8l6m\" (UniqueName: \"kubernetes.io/projected/07871f96-0eb5-4222-b564-88ef838abd1a-kube-api-access-c8l6m\") on node \"crc\" DevicePath \"\"" Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.252282 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2fc" event={"ID":"07871f96-0eb5-4222-b564-88ef838abd1a","Type":"ContainerDied","Data":"ad7b1a1d4578a1c2c9fbe9ce80c8e22b4328f5300147893d90679df7872aa20a"} Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.252363 4721 scope.go:117] "RemoveContainer" containerID="4374f6de8477b807f3e0dfa83233b3dcefc9c04646642d0d20382cd8d7487aae" Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.252579 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2fc" Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.289478 4721 scope.go:117] "RemoveContainer" containerID="69aea1b267c7e0682629cd8901fa6e11a72ccc0e35faaf0c6e7452bc75cd0f5e" Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.294319 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2fc"] Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.304848 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2fc"] Jan 30 22:24:09 crc kubenswrapper[4721]: I0130 22:24:09.327568 4721 scope.go:117] "RemoveContainer" containerID="e3ea3e15630803e4c2a143c8e0903618fe321bc8b51a16ef02aa15879fc8d8b4" Jan 30 22:24:10 crc kubenswrapper[4721]: I0130 22:24:10.107251 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" path="/var/lib/kubelet/pods/07871f96-0eb5-4222-b564-88ef838abd1a/volumes" Jan 30 22:24:15 crc kubenswrapper[4721]: I0130 22:24:15.221146 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-m2qpz_36b59aa6-8bde-4935-82ce-04ef6d8ec10c/control-plane-machine-set-operator/0.log" Jan 30 22:24:15 crc kubenswrapper[4721]: I0130 22:24:15.402575 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rhwvl_834ba560-2a16-437b-8d57-20a0017ee78f/kube-rbac-proxy/0.log" Jan 30 22:24:15 crc kubenswrapper[4721]: I0130 22:24:15.434146 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rhwvl_834ba560-2a16-437b-8d57-20a0017ee78f/machine-api-operator/0.log" Jan 30 22:24:29 crc kubenswrapper[4721]: I0130 22:24:29.448781 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:24:29 crc kubenswrapper[4721]: I0130 22:24:29.449252 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:24:29 crc kubenswrapper[4721]: I0130 22:24:29.851395 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-dwdzb_d4f08327-9c54-4b81-a397-77de365f3c7d/cert-manager-controller/0.log" Jan 30 22:24:30 crc kubenswrapper[4721]: I0130 22:24:30.012418 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-kv2jr_02aa71d8-1558-4083-b360-d40f9bd180fb/cert-manager-cainjector/0.log" Jan 30 22:24:30 crc kubenswrapper[4721]: I0130 22:24:30.132043 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-7wqvq_7a28e689-3208-4314-a5d9-c06c110c2482/cert-manager-webhook/0.log" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.478039 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g4m6p"] Jan 30 22:24:43 crc kubenswrapper[4721]: E0130 22:24:43.480922 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="registry-server" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.481009 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="registry-server" Jan 30 22:24:43 crc kubenswrapper[4721]: E0130 22:24:43.481095 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="extract-content" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.481169 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="extract-content" Jan 30 22:24:43 crc kubenswrapper[4721]: E0130 22:24:43.481242 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="extract-utilities" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.481317 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="extract-utilities" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.481588 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="07871f96-0eb5-4222-b564-88ef838abd1a" containerName="registry-server" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.483223 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.490380 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g4m6p"] Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.649446 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwpnt\" (UniqueName: \"kubernetes.io/projected/157c43df-507b-4a53-b11d-d4a504254799-kube-api-access-dwpnt\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.650172 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-utilities\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.650377 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-catalog-content\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.752966 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-catalog-content\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.753105 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwpnt\" (UniqueName: \"kubernetes.io/projected/157c43df-507b-4a53-b11d-d4a504254799-kube-api-access-dwpnt\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.753270 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-utilities\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.753635 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-catalog-content\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.753797 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-utilities\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.785368 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dwpnt\" (UniqueName: \"kubernetes.io/projected/157c43df-507b-4a53-b11d-d4a504254799-kube-api-access-dwpnt\") pod \"certified-operators-g4m6p\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:43 crc kubenswrapper[4721]: I0130 22:24:43.821148 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:44 crc kubenswrapper[4721]: I0130 22:24:44.365532 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g4m6p"] Jan 30 22:24:44 crc kubenswrapper[4721]: I0130 22:24:44.572671 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerStarted","Data":"f31fb6b3750af9036237caf1c3dd304bc68d7347ddae068e74537147ba60fc88"} Jan 30 22:24:45 crc kubenswrapper[4721]: I0130 22:24:45.583442 4721 generic.go:334] "Generic (PLEG): container finished" podID="157c43df-507b-4a53-b11d-d4a504254799" containerID="36bed66d0f9cf3f2fc46a76112add0d76c2ad55f598ebfaad7f65967dcf86fcd" exitCode=0 Jan 30 22:24:45 crc kubenswrapper[4721]: I0130 22:24:45.583594 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerDied","Data":"36bed66d0f9cf3f2fc46a76112add0d76c2ad55f598ebfaad7f65967dcf86fcd"} Jan 30 22:24:46 crc kubenswrapper[4721]: I0130 22:24:46.047459 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-4n7dn_13af9eb8-866f-4f4f-9698-e1208720edea/nmstate-console-plugin/0.log" Jan 30 22:24:46 crc kubenswrapper[4721]: I0130 22:24:46.263732 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-mqjg9_acdc2ab0-dd4b-4cb0-a325-ae7569073244/nmstate-handler/0.log" Jan 30 22:24:46 crc kubenswrapper[4721]: I0130 22:24:46.307351 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-cv4hc_fc4f4701-25d8-4af1-9128-d625c2448550/kube-rbac-proxy/0.log" Jan 30 22:24:46 crc kubenswrapper[4721]: I0130 22:24:46.942134 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-swldn_9ff9ae3f-3898-4963-996f-5ee35048f5af/nmstate-operator/0.log" Jan 30 22:24:46 crc kubenswrapper[4721]: I0130 22:24:46.973751 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-cv4hc_fc4f4701-25d8-4af1-9128-d625c2448550/nmstate-metrics/0.log" Jan 30 22:24:47 crc kubenswrapper[4721]: I0130 22:24:47.166235 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-t8rgz_b623908d-b2f5-49d0-9810-a2638fee1d6a/nmstate-webhook/0.log" Jan 30 22:24:47 crc kubenswrapper[4721]: I0130 22:24:47.606290 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerStarted","Data":"5426d9e9b5b8019b11e27579a5b391d5b139857c9a79c66412e31bfe0ed18a17"} Jan 30 22:24:48 crc kubenswrapper[4721]: I0130 22:24:48.619327 4721 generic.go:334] "Generic (PLEG): container finished" podID="157c43df-507b-4a53-b11d-d4a504254799" 
containerID="5426d9e9b5b8019b11e27579a5b391d5b139857c9a79c66412e31bfe0ed18a17" exitCode=0 Jan 30 22:24:48 crc kubenswrapper[4721]: I0130 22:24:48.619425 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerDied","Data":"5426d9e9b5b8019b11e27579a5b391d5b139857c9a79c66412e31bfe0ed18a17"} Jan 30 22:24:50 crc kubenswrapper[4721]: I0130 22:24:50.639871 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerStarted","Data":"33f197ec1e03493bd13a98e70d2f052bafe777b9a4426e0729a828357da1c8a8"} Jan 30 22:24:50 crc kubenswrapper[4721]: I0130 22:24:50.664106 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g4m6p" podStartSLOduration=4.047050416 podStartE2EDuration="7.664086906s" podCreationTimestamp="2026-01-30 22:24:43 +0000 UTC" firstStartedPulling="2026-01-30 22:24:45.585747013 +0000 UTC m=+4074.377648259" lastFinishedPulling="2026-01-30 22:24:49.202783503 +0000 UTC m=+4077.994684749" observedRunningTime="2026-01-30 22:24:50.656956013 +0000 UTC m=+4079.448857259" watchObservedRunningTime="2026-01-30 22:24:50.664086906 +0000 UTC m=+4079.455988152" Jan 30 22:24:53 crc kubenswrapper[4721]: I0130 22:24:53.821999 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:53 crc kubenswrapper[4721]: I0130 22:24:53.822652 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:53 crc kubenswrapper[4721]: I0130 22:24:53.868924 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:54 crc kubenswrapper[4721]: I0130 22:24:54.729268 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:54 crc kubenswrapper[4721]: I0130 22:24:54.778339 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g4m6p"] Jan 30 22:24:56 crc kubenswrapper[4721]: I0130 22:24:56.698229 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g4m6p" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="registry-server" containerID="cri-o://33f197ec1e03493bd13a98e70d2f052bafe777b9a4426e0729a828357da1c8a8" gracePeriod=2 Jan 30 22:24:57 crc kubenswrapper[4721]: I0130 22:24:57.712428 4721 generic.go:334] "Generic (PLEG): container finished" podID="157c43df-507b-4a53-b11d-d4a504254799" containerID="33f197ec1e03493bd13a98e70d2f052bafe777b9a4426e0729a828357da1c8a8" exitCode=0 Jan 30 22:24:57 crc kubenswrapper[4721]: I0130 22:24:57.712528 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerDied","Data":"33f197ec1e03493bd13a98e70d2f052bafe777b9a4426e0729a828357da1c8a8"} Jan 30 22:24:57 crc kubenswrapper[4721]: I0130 22:24:57.930365 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.068618 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwpnt\" (UniqueName: \"kubernetes.io/projected/157c43df-507b-4a53-b11d-d4a504254799-kube-api-access-dwpnt\") pod \"157c43df-507b-4a53-b11d-d4a504254799\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.068678 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-utilities\") pod \"157c43df-507b-4a53-b11d-d4a504254799\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.069610 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-catalog-content\") pod \"157c43df-507b-4a53-b11d-d4a504254799\" (UID: \"157c43df-507b-4a53-b11d-d4a504254799\") " Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.069742 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-utilities" (OuterVolumeSpecName: "utilities") pod "157c43df-507b-4a53-b11d-d4a504254799" (UID: "157c43df-507b-4a53-b11d-d4a504254799"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.072060 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.075142 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157c43df-507b-4a53-b11d-d4a504254799-kube-api-access-dwpnt" (OuterVolumeSpecName: "kube-api-access-dwpnt") pod "157c43df-507b-4a53-b11d-d4a504254799" (UID: "157c43df-507b-4a53-b11d-d4a504254799"). InnerVolumeSpecName "kube-api-access-dwpnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.125175 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "157c43df-507b-4a53-b11d-d4a504254799" (UID: "157c43df-507b-4a53-b11d-d4a504254799"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.177460 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwpnt\" (UniqueName: \"kubernetes.io/projected/157c43df-507b-4a53-b11d-d4a504254799-kube-api-access-dwpnt\") on node \"crc\" DevicePath \"\"" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.177503 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157c43df-507b-4a53-b11d-d4a504254799-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.723112 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4m6p" event={"ID":"157c43df-507b-4a53-b11d-d4a504254799","Type":"ContainerDied","Data":"f31fb6b3750af9036237caf1c3dd304bc68d7347ddae068e74537147ba60fc88"} Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.723462 4721 scope.go:117] "RemoveContainer" containerID="33f197ec1e03493bd13a98e70d2f052bafe777b9a4426e0729a828357da1c8a8" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.723168 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g4m6p" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.752479 4721 scope.go:117] "RemoveContainer" containerID="5426d9e9b5b8019b11e27579a5b391d5b139857c9a79c66412e31bfe0ed18a17" Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.761401 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g4m6p"] Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.772038 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g4m6p"] Jan 30 22:24:58 crc kubenswrapper[4721]: I0130 22:24:58.772146 4721 scope.go:117] "RemoveContainer" containerID="36bed66d0f9cf3f2fc46a76112add0d76c2ad55f598ebfaad7f65967dcf86fcd" Jan 30 22:24:59 crc kubenswrapper[4721]: I0130 22:24:59.448415 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:24:59 crc kubenswrapper[4721]: I0130 22:24:59.448761 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:25:00 crc kubenswrapper[4721]: I0130 22:25:00.105632 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="157c43df-507b-4a53-b11d-d4a504254799" path="/var/lib/kubelet/pods/157c43df-507b-4a53-b11d-d4a504254799/volumes" Jan 30 22:25:02 crc kubenswrapper[4721]: I0130 22:25:02.462960 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/kube-rbac-proxy/0.log" Jan 30 22:25:02 crc kubenswrapper[4721]: I0130 22:25:02.544489 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/manager/0.log" Jan 30 22:25:17 crc 
kubenswrapper[4721]: I0130 22:25:17.409031 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-lxm2h_64ed6731-ff9c-4a61-b696-00dcac24cb8d/prometheus-operator/0.log"
Jan 30 22:25:17 crc kubenswrapper[4721]: I0130 22:25:17.604748 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-25fb7_90e03b69-5f4f-469a-a8a0-82bc942a47e7/prometheus-operator-admission-webhook/0.log"
Jan 30 22:25:17 crc kubenswrapper[4721]: I0130 22:25:17.704538 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-98r68_5c65d4e6-4f21-4298-a63e-7390c6588e3f/prometheus-operator-admission-webhook/0.log"
Jan 30 22:25:18 crc kubenswrapper[4721]: I0130 22:25:18.088041 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hnrqb_e7ea1c63-c2af-4258-9864-8e09c708d507/perses-operator/0.log"
Jan 30 22:25:18 crc kubenswrapper[4721]: I0130 22:25:18.121140 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rw7xg_6007c115-e448-4886-9aa2-14a72217c0bd/operator/0.log"
Jan 30 22:25:29 crc kubenswrapper[4721]: I0130 22:25:29.448896 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 22:25:29 crc kubenswrapper[4721]: I0130 22:25:29.449649 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 22:25:29 crc kubenswrapper[4721]: I0130 22:25:29.449834 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc"
Jan 30 22:25:29 crc kubenswrapper[4721]: I0130 22:25:29.450625 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d2c93101ebb8dac6591210948bfb5b7aee51db0b6dbec2a95e6fcfd00e77c60"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 22:25:29 crc kubenswrapper[4721]: I0130 22:25:29.450681 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://7d2c93101ebb8dac6591210948bfb5b7aee51db0b6dbec2a95e6fcfd00e77c60" gracePeriod=600
Jan 30 22:25:30 crc kubenswrapper[4721]: I0130 22:25:30.023696 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="7d2c93101ebb8dac6591210948bfb5b7aee51db0b6dbec2a95e6fcfd00e77c60" exitCode=0
Jan 30 22:25:30 crc kubenswrapper[4721]: I0130 22:25:30.023784 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"7d2c93101ebb8dac6591210948bfb5b7aee51db0b6dbec2a95e6fcfd00e77c60"}
Jan 30 22:25:30 crc kubenswrapper[4721]: I0130 22:25:30.024129 4721 scope.go:117] "RemoveContainer" containerID="086a31ded236f8b1b04e7015cc131711557796117e96736ab51b6ea29ce4b20f"
Jan 30 22:25:31 crc kubenswrapper[4721]: I0130 22:25:31.036848 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4"}
Jan 30 22:25:33 crc kubenswrapper[4721]: I0130 22:25:33.919104 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-4xz99_852b81d6-0da5-4035-841b-2613bd3f2561/kube-rbac-proxy/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.084583 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-4xz99_852b81d6-0da5-4035-841b-2613bd3f2561/controller/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.186554 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.367087 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.371593 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.428586 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.447760 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.684041 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.690977 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.719483 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.738648 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.924354 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.962965 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log"
Jan 30 22:25:34 crc kubenswrapper[4721]: I0130 22:25:34.971283 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.068205 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/controller/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.188271 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/frr-metrics/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.213868 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/kube-rbac-proxy/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.315935 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/kube-rbac-proxy-frr/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.449998 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/reloader/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.619261 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-t5qjr_3aee18f8-337b-4dfc-9951-c44ea52f5193/frr-k8s-webhook-server/0.log"
Jan 30 22:25:35 crc kubenswrapper[4721]: I0130 22:25:35.776201 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5785c9bddd-96xvw_58594e33-cffd-4e67-99a7-7f3fb6b0d6f0/manager/0.log"
Jan 30 22:25:36 crc kubenswrapper[4721]: I0130 22:25:36.226930 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5bf68458df-dm6mf_dae4a1c8-c40a-4506-b9c8-b2146ef8c480/webhook-server/0.log"
Jan 30 22:25:36 crc kubenswrapper[4721]: I0130 22:25:36.281827 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-flglj_d5447d1b-a776-4ddb-a90a-e926273205f3/kube-rbac-proxy/0.log"
Jan 30 22:25:36 crc kubenswrapper[4721]: I0130 22:25:36.669923 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/frr/0.log"
Jan 30 22:25:36 crc kubenswrapper[4721]: I0130 22:25:36.865751 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-flglj_d5447d1b-a776-4ddb-a90a-e926273205f3/speaker/0.log"
Jan 30 22:25:51 crc kubenswrapper[4721]: I0130 22:25:51.677660 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/util/0.log"
Jan 30 22:25:51 crc kubenswrapper[4721]: I0130 22:25:51.836955 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/util/0.log"
Jan 30 22:25:51 crc kubenswrapper[4721]: I0130 22:25:51.867628 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/pull/0.log"
Jan 30 22:25:51 crc kubenswrapper[4721]: I0130 22:25:51.947631 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/pull/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.174229 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/pull/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.177718 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/util/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.179644 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/extract/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.342507 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/util/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.522690 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/pull/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.578771 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/util/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.589811 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/pull/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.741649 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/util/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.747936 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/pull/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.763402 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/extract/0.log"
Jan 30 22:25:52 crc kubenswrapper[4721]: I0130 22:25:52.935834 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-utilities/0.log"
Jan 30 22:25:53 crc kubenswrapper[4721]: I0130 22:25:53.150967 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-utilities/0.log"
Jan 30 22:25:53 crc kubenswrapper[4721]: I0130 22:25:53.194745 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-content/0.log"
Jan 30 22:25:53 crc kubenswrapper[4721]: I0130 22:25:53.195195 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-content/0.log"
Jan 30 22:25:53 crc kubenswrapper[4721]: I0130 22:25:53.415877 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-utilities/0.log"
Jan 30 22:25:53 crc kubenswrapper[4721]: I0130 22:25:53.427530 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-content/0.log"
Jan 30 22:25:53 crc kubenswrapper[4721]: I0130 22:25:53.698461 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-utilities/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.050624 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-content/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.052028 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-utilities/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.096829 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-content/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.106266 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/registry-server/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.419821 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-utilities/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.432695 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-content/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.621127 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-jjn55_8da1faa6-dbea-4a4c-a83d-b6a51551ab85/marketplace-operator/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.704088 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-utilities/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.987560 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-content/0.log"
Jan 30 22:25:54 crc kubenswrapper[4721]: I0130 22:25:54.991369 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-utilities/0.log"
Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.007162 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-content/0.log"
Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.072089 4721
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/registry-server/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.205671 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-content/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.230112 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-utilities/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.329555 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-utilities/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.353802 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/registry-server/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.555929 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-content/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.570866 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-utilities/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.581764 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-content/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.756594 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-utilities/0.log" Jan 30 22:25:55 crc kubenswrapper[4721]: I0130 22:25:55.761182 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-content/0.log" Jan 30 22:25:56 crc kubenswrapper[4721]: I0130 22:25:56.245479 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/registry-server/0.log" Jan 30 22:26:10 crc kubenswrapper[4721]: I0130 22:26:10.885665 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-lxm2h_64ed6731-ff9c-4a61-b696-00dcac24cb8d/prometheus-operator/0.log" Jan 30 22:26:10 crc kubenswrapper[4721]: I0130 22:26:10.946930 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-98r68_5c65d4e6-4f21-4298-a63e-7390c6588e3f/prometheus-operator-admission-webhook/0.log" Jan 30 22:26:10 crc kubenswrapper[4721]: I0130 22:26:10.961018 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-25fb7_90e03b69-5f4f-469a-a8a0-82bc942a47e7/prometheus-operator-admission-webhook/0.log" Jan 30 22:26:11 crc kubenswrapper[4721]: I0130 22:26:11.092259 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rw7xg_6007c115-e448-4886-9aa2-14a72217c0bd/operator/0.log" Jan 30 22:26:11 crc kubenswrapper[4721]: I0130 22:26:11.135765 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hnrqb_e7ea1c63-c2af-4258-9864-8e09c708d507/perses-operator/0.log" Jan 30 22:26:25 crc kubenswrapper[4721]: I0130 22:26:25.310841 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/kube-rbac-proxy/0.log" Jan 30 22:26:25 crc kubenswrapper[4721]: I0130 22:26:25.365894 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/manager/0.log" Jan 30 22:26:47 crc kubenswrapper[4721]: E0130 22:26:47.414912 4721 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.20:51744->38.102.83.20:38213: read tcp 38.102.83.20:51744->38.102.83.20:38213: read: connection reset by peer Jan 30 22:27:59 crc kubenswrapper[4721]: I0130 22:27:59.449011 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:27:59 crc kubenswrapper[4721]: I0130 22:27:59.449746 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:28:13 crc kubenswrapper[4721]: I0130 22:28:13.634090 4721 generic.go:334] "Generic (PLEG): container finished" podID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerID="0de58a4330234414fad2e6b9cebb6610dd420b8e8af1b12593fc1835a00cbcd5" exitCode=0 Jan 30 22:28:13 crc kubenswrapper[4721]: I0130 22:28:13.634166 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" event={"ID":"dd16370f-a0bb-4f72-8822-c4c87a37ba39","Type":"ContainerDied","Data":"0de58a4330234414fad2e6b9cebb6610dd420b8e8af1b12593fc1835a00cbcd5"} Jan 30 22:28:13 crc kubenswrapper[4721]: I0130 22:28:13.635386 4721 scope.go:117] "RemoveContainer" containerID="0de58a4330234414fad2e6b9cebb6610dd420b8e8af1b12593fc1835a00cbcd5" Jan 30 22:28:14 crc kubenswrapper[4721]: I0130 22:28:14.094342 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9rfs8_must-gather-zjqmz_dd16370f-a0bb-4f72-8822-c4c87a37ba39/gather/0.log" Jan 30 22:28:17 crc kubenswrapper[4721]: I0130 22:28:17.216854 4721 scope.go:117] "RemoveContainer" containerID="1e4b9dcc635b9ee05ceff244e7e9458005ded128ebae31fcf1d450e80f3adf47" Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.302151 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9rfs8/must-gather-zjqmz"] Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.302903 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="copy" 
containerID="cri-o://ad4dad7c8fd5d8cc04f3e7f1178e0fe6e87d38da599dfb4dc7e1c56c62f179fc" gracePeriod=2 Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.313666 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9rfs8/must-gather-zjqmz"] Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.731792 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9rfs8_must-gather-zjqmz_dd16370f-a0bb-4f72-8822-c4c87a37ba39/copy/0.log" Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.732431 4721 generic.go:334] "Generic (PLEG): container finished" podID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerID="ad4dad7c8fd5d8cc04f3e7f1178e0fe6e87d38da599dfb4dc7e1c56c62f179fc" exitCode=143 Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.978063 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9rfs8_must-gather-zjqmz_dd16370f-a0bb-4f72-8822-c4c87a37ba39/copy/0.log" Jan 30 22:28:23 crc kubenswrapper[4721]: I0130 22:28:23.978564 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.093773 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dd16370f-a0bb-4f72-8822-c4c87a37ba39-must-gather-output\") pod \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.093845 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnftb\" (UniqueName: \"kubernetes.io/projected/dd16370f-a0bb-4f72-8822-c4c87a37ba39-kube-api-access-pnftb\") pod \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\" (UID: \"dd16370f-a0bb-4f72-8822-c4c87a37ba39\") " Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.099415 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd16370f-a0bb-4f72-8822-c4c87a37ba39-kube-api-access-pnftb" (OuterVolumeSpecName: "kube-api-access-pnftb") pod "dd16370f-a0bb-4f72-8822-c4c87a37ba39" (UID: "dd16370f-a0bb-4f72-8822-c4c87a37ba39"). InnerVolumeSpecName "kube-api-access-pnftb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.196292 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnftb\" (UniqueName: \"kubernetes.io/projected/dd16370f-a0bb-4f72-8822-c4c87a37ba39-kube-api-access-pnftb\") on node \"crc\" DevicePath \"\"" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.311194 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd16370f-a0bb-4f72-8822-c4c87a37ba39-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "dd16370f-a0bb-4f72-8822-c4c87a37ba39" (UID: "dd16370f-a0bb-4f72-8822-c4c87a37ba39"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.400162 4721 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dd16370f-a0bb-4f72-8822-c4c87a37ba39-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.742870 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9rfs8_must-gather-zjqmz_dd16370f-a0bb-4f72-8822-c4c87a37ba39/copy/0.log" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.743453 4721 scope.go:117] "RemoveContainer" containerID="ad4dad7c8fd5d8cc04f3e7f1178e0fe6e87d38da599dfb4dc7e1c56c62f179fc" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.743498 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9rfs8/must-gather-zjqmz" Jan 30 22:28:24 crc kubenswrapper[4721]: I0130 22:28:24.762322 4721 scope.go:117] "RemoveContainer" containerID="0de58a4330234414fad2e6b9cebb6610dd420b8e8af1b12593fc1835a00cbcd5" Jan 30 22:28:26 crc kubenswrapper[4721]: I0130 22:28:26.103452 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" path="/var/lib/kubelet/pods/dd16370f-a0bb-4f72-8822-c4c87a37ba39/volumes" Jan 30 22:28:29 crc kubenswrapper[4721]: I0130 22:28:29.448230 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:28:29 crc kubenswrapper[4721]: I0130 22:28:29.448727 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:28:59 crc kubenswrapper[4721]: I0130 22:28:59.449051 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:28:59 crc kubenswrapper[4721]: I0130 22:28:59.449609 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:28:59 crc kubenswrapper[4721]: I0130 22:28:59.449659 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 22:28:59 crc kubenswrapper[4721]: I0130 22:28:59.450482 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 22:28:59 crc kubenswrapper[4721]: I0130 
22:28:59.450541 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" gracePeriod=600 Jan 30 22:28:59 crc kubenswrapper[4721]: E0130 22:28:59.571831 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:29:00 crc kubenswrapper[4721]: I0130 22:29:00.099606 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" exitCode=0 Jan 30 22:29:00 crc kubenswrapper[4721]: I0130 22:29:00.108694 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4"} Jan 30 22:29:00 crc kubenswrapper[4721]: I0130 22:29:00.108753 4721 scope.go:117] "RemoveContainer" containerID="7d2c93101ebb8dac6591210948bfb5b7aee51db0b6dbec2a95e6fcfd00e77c60" Jan 30 22:29:00 crc kubenswrapper[4721]: I0130 22:29:00.110254 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:29:00 crc kubenswrapper[4721]: E0130 22:29:00.110660 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:29:13 crc kubenswrapper[4721]: I0130 22:29:13.092837 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:29:13 crc kubenswrapper[4721]: E0130 22:29:13.093800 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:29:24 crc kubenswrapper[4721]: I0130 22:29:24.092743 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:29:24 crc kubenswrapper[4721]: E0130 22:29:24.093571 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:29:39 crc kubenswrapper[4721]: I0130 22:29:39.093093 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:29:39 crc kubenswrapper[4721]: E0130 22:29:39.094485 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:29:54 crc kubenswrapper[4721]: I0130 22:29:54.092242 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:29:54 crc kubenswrapper[4721]: E0130 22:29:54.093202 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.130109 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2swqt"] Jan 30 22:29:58 crc kubenswrapper[4721]: E0130 22:29:58.130854 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="gather" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.130867 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="gather" Jan 30 22:29:58 crc kubenswrapper[4721]: E0130 22:29:58.130892 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="extract-content" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.130900 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="extract-content" Jan 30 22:29:58 crc kubenswrapper[4721]: E0130 22:29:58.130924 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="extract-utilities" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.130930 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="extract-utilities" Jan 30 22:29:58 crc kubenswrapper[4721]: E0130 22:29:58.130945 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="registry-server" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.130951 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="registry-server" Jan 30 22:29:58 crc kubenswrapper[4721]: E0130 22:29:58.130965 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="copy" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.130972 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="copy" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.131151 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="copy" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.131177 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd16370f-a0bb-4f72-8822-c4c87a37ba39" containerName="gather" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.131188 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="157c43df-507b-4a53-b11d-d4a504254799" containerName="registry-server" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.132746 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.150282 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2swqt"] Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.200837 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s74p8\" (UniqueName: \"kubernetes.io/projected/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-kube-api-access-s74p8\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.201074 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-catalog-content\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.201262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-utilities\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.303032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s74p8\" (UniqueName: \"kubernetes.io/projected/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-kube-api-access-s74p8\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.303471 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-catalog-content\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.303578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-utilities\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.304004 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-catalog-content\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.304055 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-utilities\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.322777 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s74p8\" (UniqueName: \"kubernetes.io/projected/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-kube-api-access-s74p8\") pod \"community-operators-2swqt\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:58 crc kubenswrapper[4721]: I0130 22:29:58.458339 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:29:59 crc kubenswrapper[4721]: I0130 22:29:59.067196 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2swqt"] Jan 30 22:29:59 crc kubenswrapper[4721]: I0130 22:29:59.661972 4721 generic.go:334] "Generic (PLEG): container finished" podID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerID="49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d" exitCode=0 Jan 30 22:29:59 crc kubenswrapper[4721]: I0130 22:29:59.662465 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerDied","Data":"49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d"} Jan 30 22:29:59 crc kubenswrapper[4721]: I0130 22:29:59.662581 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerStarted","Data":"5f61c84fd3dee150d4aae3a59216ffb680fc21bf92116934b776b52f81c9a598"} Jan 30 22:29:59 crc kubenswrapper[4721]: I0130 22:29:59.664183 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.184125 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck"] Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.186253 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.188485 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.188800 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.196668 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck"] Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.242262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceeebb4a-eb90-4229-b72b-bad4b055c09a-config-volume\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.242510 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceeebb4a-eb90-4229-b72b-bad4b055c09a-secret-volume\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.242645 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr5zc\" (UniqueName: \"kubernetes.io/projected/ceeebb4a-eb90-4229-b72b-bad4b055c09a-kube-api-access-vr5zc\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.345365 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceeebb4a-eb90-4229-b72b-bad4b055c09a-config-volume\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.345499 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceeebb4a-eb90-4229-b72b-bad4b055c09a-secret-volume\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.345528 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr5zc\" (UniqueName: \"kubernetes.io/projected/ceeebb4a-eb90-4229-b72b-bad4b055c09a-kube-api-access-vr5zc\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.346640 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceeebb4a-eb90-4229-b72b-bad4b055c09a-config-volume\") pod 
\"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.351594 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceeebb4a-eb90-4229-b72b-bad4b055c09a-secret-volume\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.365467 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr5zc\" (UniqueName: \"kubernetes.io/projected/ceeebb4a-eb90-4229-b72b-bad4b055c09a-kube-api-access-vr5zc\") pod \"collect-profiles-29496870-2t7ck\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.513609 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:00 crc kubenswrapper[4721]: I0130 22:30:00.687585 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerStarted","Data":"7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437"} Jan 30 22:30:01 crc kubenswrapper[4721]: I0130 22:30:01.010997 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck"] Jan 30 22:30:01 crc kubenswrapper[4721]: W0130 22:30:01.016785 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podceeebb4a_eb90_4229_b72b_bad4b055c09a.slice/crio-2db02130c5bcc44e603c591a9884aa532fd485c863284525db39671a11eaf5ce WatchSource:0}: Error finding container 2db02130c5bcc44e603c591a9884aa532fd485c863284525db39671a11eaf5ce: Status 404 returned error can't find the container with id 2db02130c5bcc44e603c591a9884aa532fd485c863284525db39671a11eaf5ce Jan 30 22:30:01 crc kubenswrapper[4721]: I0130 22:30:01.700032 4721 generic.go:334] "Generic (PLEG): container finished" podID="ceeebb4a-eb90-4229-b72b-bad4b055c09a" containerID="3faa3d346eacaa458f5fc8760534759481ee4e99bc143eeddd499fb28a8d21cb" exitCode=0 Jan 30 22:30:01 crc kubenswrapper[4721]: I0130 22:30:01.700109 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" event={"ID":"ceeebb4a-eb90-4229-b72b-bad4b055c09a","Type":"ContainerDied","Data":"3faa3d346eacaa458f5fc8760534759481ee4e99bc143eeddd499fb28a8d21cb"} Jan 30 22:30:01 crc kubenswrapper[4721]: I0130 22:30:01.700386 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" event={"ID":"ceeebb4a-eb90-4229-b72b-bad4b055c09a","Type":"ContainerStarted","Data":"2db02130c5bcc44e603c591a9884aa532fd485c863284525db39671a11eaf5ce"} Jan 30 22:30:02 crc kubenswrapper[4721]: I0130 22:30:02.734392 4721 generic.go:334] "Generic (PLEG): container finished" podID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerID="7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437" exitCode=0 Jan 30 22:30:02 crc kubenswrapper[4721]: I0130 22:30:02.734484 4721 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerDied","Data":"7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437"} Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.294551 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.412328 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceeebb4a-eb90-4229-b72b-bad4b055c09a-config-volume\") pod \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.412406 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr5zc\" (UniqueName: \"kubernetes.io/projected/ceeebb4a-eb90-4229-b72b-bad4b055c09a-kube-api-access-vr5zc\") pod \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.412784 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceeebb4a-eb90-4229-b72b-bad4b055c09a-secret-volume\") pod \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\" (UID: \"ceeebb4a-eb90-4229-b72b-bad4b055c09a\") " Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.413310 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ceeebb4a-eb90-4229-b72b-bad4b055c09a-config-volume" (OuterVolumeSpecName: "config-volume") pod "ceeebb4a-eb90-4229-b72b-bad4b055c09a" (UID: "ceeebb4a-eb90-4229-b72b-bad4b055c09a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.413646 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceeebb4a-eb90-4229-b72b-bad4b055c09a-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.422448 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ceeebb4a-eb90-4229-b72b-bad4b055c09a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ceeebb4a-eb90-4229-b72b-bad4b055c09a" (UID: "ceeebb4a-eb90-4229-b72b-bad4b055c09a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.422765 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceeebb4a-eb90-4229-b72b-bad4b055c09a-kube-api-access-vr5zc" (OuterVolumeSpecName: "kube-api-access-vr5zc") pod "ceeebb4a-eb90-4229-b72b-bad4b055c09a" (UID: "ceeebb4a-eb90-4229-b72b-bad4b055c09a"). InnerVolumeSpecName "kube-api-access-vr5zc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.518044 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr5zc\" (UniqueName: \"kubernetes.io/projected/ceeebb4a-eb90-4229-b72b-bad4b055c09a-kube-api-access-vr5zc\") on node \"crc\" DevicePath \"\"" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.518929 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceeebb4a-eb90-4229-b72b-bad4b055c09a-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.744803 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerStarted","Data":"49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c"} Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.747642 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" event={"ID":"ceeebb4a-eb90-4229-b72b-bad4b055c09a","Type":"ContainerDied","Data":"2db02130c5bcc44e603c591a9884aa532fd485c863284525db39671a11eaf5ce"} Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.747679 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2db02130c5bcc44e603c591a9884aa532fd485c863284525db39671a11eaf5ce" Jan 30 22:30:03 crc kubenswrapper[4721]: I0130 22:30:03.747710 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29496870-2t7ck" Jan 30 22:30:04 crc kubenswrapper[4721]: I0130 22:30:04.335087 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2swqt" podStartSLOduration=2.877552632 podStartE2EDuration="6.335060104s" podCreationTimestamp="2026-01-30 22:29:58 +0000 UTC" firstStartedPulling="2026-01-30 22:29:59.663982471 +0000 UTC m=+4388.455883717" lastFinishedPulling="2026-01-30 22:30:03.121489933 +0000 UTC m=+4391.913391189" observedRunningTime="2026-01-30 22:30:03.766025907 +0000 UTC m=+4392.557927163" watchObservedRunningTime="2026-01-30 22:30:04.335060104 +0000 UTC m=+4393.126961350" Jan 30 22:30:04 crc kubenswrapper[4721]: I0130 22:30:04.373130 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42"] Jan 30 22:30:04 crc kubenswrapper[4721]: I0130 22:30:04.382067 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29496825-2hl42"] Jan 30 22:30:05 crc kubenswrapper[4721]: I0130 22:30:05.092821 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:30:05 crc kubenswrapper[4721]: E0130 22:30:05.093087 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:30:06 crc kubenswrapper[4721]: I0130 22:30:06.139745 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="d725937f-ee2d-431d-a0ed-94dd553cd014" path="/var/lib/kubelet/pods/d725937f-ee2d-431d-a0ed-94dd553cd014/volumes" Jan 30 22:30:08 crc kubenswrapper[4721]: I0130 22:30:08.458825 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:30:08 crc kubenswrapper[4721]: I0130 22:30:08.459211 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:30:08 crc kubenswrapper[4721]: I0130 22:30:08.523564 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:30:08 crc kubenswrapper[4721]: I0130 22:30:08.845706 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:30:08 crc kubenswrapper[4721]: I0130 22:30:08.892862 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2swqt"] Jan 30 22:30:10 crc kubenswrapper[4721]: I0130 22:30:10.811494 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2swqt" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="registry-server" containerID="cri-o://49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c" gracePeriod=2 Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.477231 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.582008 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s74p8\" (UniqueName: \"kubernetes.io/projected/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-kube-api-access-s74p8\") pod \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.582120 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-catalog-content\") pod \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.582188 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-utilities\") pod \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\" (UID: \"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf\") " Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.583582 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-utilities" (OuterVolumeSpecName: "utilities") pod "283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" (UID: "283d581e-55cd-4a35-b9c1-aebd9ea1b6bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.589837 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-kube-api-access-s74p8" (OuterVolumeSpecName: "kube-api-access-s74p8") pod "283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" (UID: "283d581e-55cd-4a35-b9c1-aebd9ea1b6bf"). InnerVolumeSpecName "kube-api-access-s74p8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.684287 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s74p8\" (UniqueName: \"kubernetes.io/projected/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-kube-api-access-s74p8\") on node \"crc\" DevicePath \"\"" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.684553 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.826350 4721 generic.go:334] "Generic (PLEG): container finished" podID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerID="49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c" exitCode=0 Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.826412 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2swqt" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.826410 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerDied","Data":"49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c"} Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.826534 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2swqt" event={"ID":"283d581e-55cd-4a35-b9c1-aebd9ea1b6bf","Type":"ContainerDied","Data":"5f61c84fd3dee150d4aae3a59216ffb680fc21bf92116934b776b52f81c9a598"} Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.826551 4721 scope.go:117] "RemoveContainer" containerID="49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.844907 4721 scope.go:117] "RemoveContainer" containerID="7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.872360 4721 scope.go:117] "RemoveContainer" containerID="49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.920605 4721 scope.go:117] "RemoveContainer" containerID="49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c" Jan 30 22:30:11 crc kubenswrapper[4721]: E0130 22:30:11.920971 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c\": container with ID starting with 49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c not found: ID does not exist" containerID="49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.921008 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c"} err="failed to get container status \"49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c\": rpc error: code = NotFound desc = could not find container \"49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c\": container with ID starting with 49db9d7d0ef20d7519d126d3d3f18d81fe93653b650ac1e042876da51d14267c not found: ID does not exist" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.921031 4721 scope.go:117] 
"RemoveContainer" containerID="7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437" Jan 30 22:30:11 crc kubenswrapper[4721]: E0130 22:30:11.921642 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437\": container with ID starting with 7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437 not found: ID does not exist" containerID="7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.921667 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437"} err="failed to get container status \"7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437\": rpc error: code = NotFound desc = could not find container \"7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437\": container with ID starting with 7a0663a15a8651253f2940e3259ed85d5aece821bda08c55332bf5136ffcd437 not found: ID does not exist" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.921680 4721 scope.go:117] "RemoveContainer" containerID="49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d" Jan 30 22:30:11 crc kubenswrapper[4721]: E0130 22:30:11.922215 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d\": container with ID starting with 49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d not found: ID does not exist" containerID="49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d" Jan 30 22:30:11 crc kubenswrapper[4721]: I0130 22:30:11.922261 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d"} err="failed to get container status \"49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d\": rpc error: code = NotFound desc = could not find container \"49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d\": container with ID starting with 49907fac660a2ce5bfe0e50f57e30f0bc1a696f3e2052f023c52c86f553fbb9d not found: ID does not exist" Jan 30 22:30:12 crc kubenswrapper[4721]: I0130 22:30:12.171942 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" (UID: "283d581e-55cd-4a35-b9c1-aebd9ea1b6bf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:30:12 crc kubenswrapper[4721]: I0130 22:30:12.194400 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:30:12 crc kubenswrapper[4721]: I0130 22:30:12.456433 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2swqt"] Jan 30 22:30:12 crc kubenswrapper[4721]: I0130 22:30:12.465475 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2swqt"] Jan 30 22:30:14 crc kubenswrapper[4721]: I0130 22:30:14.103586 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" path="/var/lib/kubelet/pods/283d581e-55cd-4a35-b9c1-aebd9ea1b6bf/volumes" Jan 30 22:30:16 crc kubenswrapper[4721]: I0130 22:30:16.093029 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:30:16 crc kubenswrapper[4721]: E0130 22:30:16.093769 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:30:17 crc kubenswrapper[4721]: I0130 22:30:17.308744 4721 scope.go:117] "RemoveContainer" containerID="229eb6ef07a4e86fefd23948eebf95b946fb84567533303c4f1bca3d2759366c" Jan 30 22:30:31 crc kubenswrapper[4721]: I0130 22:30:31.092908 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:30:31 crc kubenswrapper[4721]: E0130 22:30:31.093975 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:30:46 crc kubenswrapper[4721]: I0130 22:30:46.093011 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:30:46 crc kubenswrapper[4721]: E0130 22:30:46.094048 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:31:00 crc kubenswrapper[4721]: I0130 22:31:00.093256 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:31:00 crc kubenswrapper[4721]: E0130 22:31:00.094176 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:31:11 crc kubenswrapper[4721]: I0130 22:31:11.092696 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:31:11 crc kubenswrapper[4721]: E0130 22:31:11.093510 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:31:24 crc kubenswrapper[4721]: I0130 22:31:24.092939 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:31:24 crc kubenswrapper[4721]: E0130 22:31:24.093875 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.449140 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2mv76/must-gather-8wbxc"] Jan 30 22:31:25 crc kubenswrapper[4721]: E0130 22:31:25.449981 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="registry-server" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.449998 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="registry-server" Jan 30 22:31:25 crc kubenswrapper[4721]: E0130 22:31:25.450015 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceeebb4a-eb90-4229-b72b-bad4b055c09a" containerName="collect-profiles" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.450021 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceeebb4a-eb90-4229-b72b-bad4b055c09a" containerName="collect-profiles" Jan 30 22:31:25 crc kubenswrapper[4721]: E0130 22:31:25.450037 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="extract-utilities" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.450045 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="extract-utilities" Jan 30 22:31:25 crc kubenswrapper[4721]: E0130 22:31:25.450058 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="extract-content" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.450064 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" containerName="extract-content" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.450255 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="283d581e-55cd-4a35-b9c1-aebd9ea1b6bf" 
containerName="registry-server" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.450267 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceeebb4a-eb90-4229-b72b-bad4b055c09a" containerName="collect-profiles" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.451394 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.457854 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2mv76"/"openshift-service-ca.crt" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.458170 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2mv76"/"kube-root-ca.crt" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.479162 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2mv76/must-gather-8wbxc"] Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.515686 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ec87141-36fc-4527-a7ad-91fead722da3-must-gather-output\") pod \"must-gather-8wbxc\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.515798 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8svr\" (UniqueName: \"kubernetes.io/projected/8ec87141-36fc-4527-a7ad-91fead722da3-kube-api-access-n8svr\") pod \"must-gather-8wbxc\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.617583 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8svr\" (UniqueName: \"kubernetes.io/projected/8ec87141-36fc-4527-a7ad-91fead722da3-kube-api-access-n8svr\") pod \"must-gather-8wbxc\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.617805 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ec87141-36fc-4527-a7ad-91fead722da3-must-gather-output\") pod \"must-gather-8wbxc\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.618178 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ec87141-36fc-4527-a7ad-91fead722da3-must-gather-output\") pod \"must-gather-8wbxc\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.642937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8svr\" (UniqueName: \"kubernetes.io/projected/8ec87141-36fc-4527-a7ad-91fead722da3-kube-api-access-n8svr\") pod \"must-gather-8wbxc\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:25 crc kubenswrapper[4721]: I0130 22:31:25.775666 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:31:26 crc kubenswrapper[4721]: I0130 22:31:26.242332 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2mv76/must-gather-8wbxc"] Jan 30 22:31:26 crc kubenswrapper[4721]: I0130 22:31:26.563590 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/must-gather-8wbxc" event={"ID":"8ec87141-36fc-4527-a7ad-91fead722da3","Type":"ContainerStarted","Data":"a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2"} Jan 30 22:31:26 crc kubenswrapper[4721]: I0130 22:31:26.563942 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/must-gather-8wbxc" event={"ID":"8ec87141-36fc-4527-a7ad-91fead722da3","Type":"ContainerStarted","Data":"3846c82a6f977c8e766782b68586e14ac35c53654fc87826cf5fe10979ce2daa"} Jan 30 22:31:27 crc kubenswrapper[4721]: I0130 22:31:27.573231 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/must-gather-8wbxc" event={"ID":"8ec87141-36fc-4527-a7ad-91fead722da3","Type":"ContainerStarted","Data":"68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145"} Jan 30 22:31:27 crc kubenswrapper[4721]: I0130 22:31:27.600309 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-2mv76/must-gather-8wbxc" podStartSLOduration=2.60028046 podStartE2EDuration="2.60028046s" podCreationTimestamp="2026-01-30 22:31:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 22:31:27.591934458 +0000 UTC m=+4476.383835724" watchObservedRunningTime="2026-01-30 22:31:27.60028046 +0000 UTC m=+4476.392181706" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.087533 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2mv76/crc-debug-jtlc4"] Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.090594 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.095055 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2mv76"/"default-dockercfg-dmwpd" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.219393 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmz6p\" (UniqueName: \"kubernetes.io/projected/0ff8f477-f012-4804-855a-74655e0d6088-kube-api-access-wmz6p\") pod \"crc-debug-jtlc4\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.219527 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ff8f477-f012-4804-855a-74655e0d6088-host\") pod \"crc-debug-jtlc4\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.321606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmz6p\" (UniqueName: \"kubernetes.io/projected/0ff8f477-f012-4804-855a-74655e0d6088-kube-api-access-wmz6p\") pod \"crc-debug-jtlc4\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.321710 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ff8f477-f012-4804-855a-74655e0d6088-host\") pod \"crc-debug-jtlc4\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.321976 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ff8f477-f012-4804-855a-74655e0d6088-host\") pod \"crc-debug-jtlc4\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.341747 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmz6p\" (UniqueName: \"kubernetes.io/projected/0ff8f477-f012-4804-855a-74655e0d6088-kube-api-access-wmz6p\") pod \"crc-debug-jtlc4\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.430714 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:31:30 crc kubenswrapper[4721]: W0130 22:31:30.459286 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ff8f477_f012_4804_855a_74655e0d6088.slice/crio-c4aab46a959da46591799471e607d908439d6b89361643aa3fb86a37e426c464 WatchSource:0}: Error finding container c4aab46a959da46591799471e607d908439d6b89361643aa3fb86a37e426c464: Status 404 returned error can't find the container with id c4aab46a959da46591799471e607d908439d6b89361643aa3fb86a37e426c464 Jan 30 22:31:30 crc kubenswrapper[4721]: I0130 22:31:30.606496 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" event={"ID":"0ff8f477-f012-4804-855a-74655e0d6088","Type":"ContainerStarted","Data":"c4aab46a959da46591799471e607d908439d6b89361643aa3fb86a37e426c464"} Jan 30 22:31:31 crc kubenswrapper[4721]: I0130 22:31:31.621474 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" event={"ID":"0ff8f477-f012-4804-855a-74655e0d6088","Type":"ContainerStarted","Data":"d655a09627d291b2cfb9d750d271e6507487e85f1fdf5d1b558a1f58d0d93c40"} Jan 30 22:31:31 crc kubenswrapper[4721]: I0130 22:31:31.649492 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" podStartSLOduration=1.6494739539999999 podStartE2EDuration="1.649473954s" podCreationTimestamp="2026-01-30 22:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 22:31:31.636613361 +0000 UTC m=+4480.428514627" watchObservedRunningTime="2026-01-30 22:31:31.649473954 +0000 UTC m=+4480.441375200" Jan 30 22:31:39 crc kubenswrapper[4721]: I0130 22:31:39.092723 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:31:39 crc kubenswrapper[4721]: E0130 22:31:39.094904 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:31:53 crc kubenswrapper[4721]: I0130 22:31:53.091922 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:31:53 crc kubenswrapper[4721]: E0130 22:31:53.092753 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:32:06 crc kubenswrapper[4721]: I0130 22:32:06.092599 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:32:06 crc kubenswrapper[4721]: E0130 22:32:06.093595 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:32:18 crc kubenswrapper[4721]: I0130 22:32:18.092199 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:32:18 crc kubenswrapper[4721]: E0130 22:32:18.093069 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:32:21 crc kubenswrapper[4721]: I0130 22:32:21.094943 4721 generic.go:334] "Generic (PLEG): container finished" podID="0ff8f477-f012-4804-855a-74655e0d6088" containerID="d655a09627d291b2cfb9d750d271e6507487e85f1fdf5d1b558a1f58d0d93c40" exitCode=0 Jan 30 22:32:21 crc kubenswrapper[4721]: I0130 22:32:21.095011 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" event={"ID":"0ff8f477-f012-4804-855a-74655e0d6088","Type":"ContainerDied","Data":"d655a09627d291b2cfb9d750d271e6507487e85f1fdf5d1b558a1f58d0d93c40"} Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.256895 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.310186 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2mv76/crc-debug-jtlc4"] Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.320038 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2mv76/crc-debug-jtlc4"] Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.428142 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ff8f477-f012-4804-855a-74655e0d6088-host\") pod \"0ff8f477-f012-4804-855a-74655e0d6088\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.428250 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmz6p\" (UniqueName: \"kubernetes.io/projected/0ff8f477-f012-4804-855a-74655e0d6088-kube-api-access-wmz6p\") pod \"0ff8f477-f012-4804-855a-74655e0d6088\" (UID: \"0ff8f477-f012-4804-855a-74655e0d6088\") " Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.428321 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0ff8f477-f012-4804-855a-74655e0d6088-host" (OuterVolumeSpecName: "host") pod "0ff8f477-f012-4804-855a-74655e0d6088" (UID: "0ff8f477-f012-4804-855a-74655e0d6088"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.428931 4721 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ff8f477-f012-4804-855a-74655e0d6088-host\") on node \"crc\" DevicePath \"\"" Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.433493 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ff8f477-f012-4804-855a-74655e0d6088-kube-api-access-wmz6p" (OuterVolumeSpecName: "kube-api-access-wmz6p") pod "0ff8f477-f012-4804-855a-74655e0d6088" (UID: "0ff8f477-f012-4804-855a-74655e0d6088"). InnerVolumeSpecName "kube-api-access-wmz6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:32:22 crc kubenswrapper[4721]: I0130 22:32:22.530916 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmz6p\" (UniqueName: \"kubernetes.io/projected/0ff8f477-f012-4804-855a-74655e0d6088-kube-api-access-wmz6p\") on node \"crc\" DevicePath \"\"" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.115359 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4aab46a959da46591799471e607d908439d6b89361643aa3fb86a37e426c464" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.115431 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-jtlc4" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.511954 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2mv76/crc-debug-b9rft"] Jan 30 22:32:23 crc kubenswrapper[4721]: E0130 22:32:23.512656 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ff8f477-f012-4804-855a-74655e0d6088" containerName="container-00" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.512668 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ff8f477-f012-4804-855a-74655e0d6088" containerName="container-00" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.512871 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ff8f477-f012-4804-855a-74655e0d6088" containerName="container-00" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.513800 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.517990 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2mv76"/"default-dockercfg-dmwpd" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.653196 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-host\") pod \"crc-debug-b9rft\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.653237 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctpfx\" (UniqueName: \"kubernetes.io/projected/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-kube-api-access-ctpfx\") pod \"crc-debug-b9rft\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.755390 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-host\") pod \"crc-debug-b9rft\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.755429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctpfx\" (UniqueName: \"kubernetes.io/projected/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-kube-api-access-ctpfx\") pod \"crc-debug-b9rft\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.755512 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-host\") pod \"crc-debug-b9rft\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.775584 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctpfx\" (UniqueName: \"kubernetes.io/projected/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-kube-api-access-ctpfx\") pod \"crc-debug-b9rft\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:23 crc kubenswrapper[4721]: I0130 22:32:23.833017 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:24 crc kubenswrapper[4721]: I0130 22:32:24.102328 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ff8f477-f012-4804-855a-74655e0d6088" path="/var/lib/kubelet/pods/0ff8f477-f012-4804-855a-74655e0d6088/volumes" Jan 30 22:32:24 crc kubenswrapper[4721]: I0130 22:32:24.147083 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-b9rft" event={"ID":"75e9435a-363c-44e3-b2f2-d7e1a6076fe9","Type":"ContainerStarted","Data":"c4cf6330a5a6a0b41b9e4a8444168d3f2599918667eda019c6e778164fb24029"} Jan 30 22:32:25 crc kubenswrapper[4721]: I0130 22:32:25.158930 4721 generic.go:334] "Generic (PLEG): container finished" podID="75e9435a-363c-44e3-b2f2-d7e1a6076fe9" containerID="e79e3d9cef64153b34defe7fc077a468e4153cf46fe7608936e41ee1ce4e02c5" exitCode=0 Jan 30 22:32:25 crc kubenswrapper[4721]: I0130 22:32:25.159011 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-b9rft" event={"ID":"75e9435a-363c-44e3-b2f2-d7e1a6076fe9","Type":"ContainerDied","Data":"e79e3d9cef64153b34defe7fc077a468e4153cf46fe7608936e41ee1ce4e02c5"} Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.304040 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.407836 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctpfx\" (UniqueName: \"kubernetes.io/projected/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-kube-api-access-ctpfx\") pod \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.408129 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-host\") pod \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\" (UID: \"75e9435a-363c-44e3-b2f2-d7e1a6076fe9\") " Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.408242 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-host" (OuterVolumeSpecName: "host") pod "75e9435a-363c-44e3-b2f2-d7e1a6076fe9" (UID: "75e9435a-363c-44e3-b2f2-d7e1a6076fe9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.408611 4721 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-host\") on node \"crc\" DevicePath \"\"" Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.416862 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-kube-api-access-ctpfx" (OuterVolumeSpecName: "kube-api-access-ctpfx") pod "75e9435a-363c-44e3-b2f2-d7e1a6076fe9" (UID: "75e9435a-363c-44e3-b2f2-d7e1a6076fe9"). InnerVolumeSpecName "kube-api-access-ctpfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.510151 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctpfx\" (UniqueName: \"kubernetes.io/projected/75e9435a-363c-44e3-b2f2-d7e1a6076fe9-kube-api-access-ctpfx\") on node \"crc\" DevicePath \"\"" Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.703356 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2mv76/crc-debug-b9rft"] Jan 30 22:32:26 crc kubenswrapper[4721]: I0130 22:32:26.711564 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2mv76/crc-debug-b9rft"] Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.178203 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4cf6330a5a6a0b41b9e4a8444168d3f2599918667eda019c6e778164fb24029" Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.178312 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-b9rft" Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.899484 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2mv76/crc-debug-xlngs"] Jan 30 22:32:27 crc kubenswrapper[4721]: E0130 22:32:27.900142 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75e9435a-363c-44e3-b2f2-d7e1a6076fe9" containerName="container-00" Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.900154 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="75e9435a-363c-44e3-b2f2-d7e1a6076fe9" containerName="container-00" Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.900366 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="75e9435a-363c-44e3-b2f2-d7e1a6076fe9" containerName="container-00" Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.901057 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:27 crc kubenswrapper[4721]: I0130 22:32:27.903804 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2mv76"/"default-dockercfg-dmwpd" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.039173 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzmzc\" (UniqueName: \"kubernetes.io/projected/cd552ad5-ead8-4fb5-8efb-8458e626e97d-kube-api-access-xzmzc\") pod \"crc-debug-xlngs\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.039245 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cd552ad5-ead8-4fb5-8efb-8458e626e97d-host\") pod \"crc-debug-xlngs\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.103763 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75e9435a-363c-44e3-b2f2-d7e1a6076fe9" path="/var/lib/kubelet/pods/75e9435a-363c-44e3-b2f2-d7e1a6076fe9/volumes" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.141624 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzmzc\" (UniqueName: \"kubernetes.io/projected/cd552ad5-ead8-4fb5-8efb-8458e626e97d-kube-api-access-xzmzc\") pod \"crc-debug-xlngs\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.141692 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cd552ad5-ead8-4fb5-8efb-8458e626e97d-host\") pod \"crc-debug-xlngs\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.141836 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cd552ad5-ead8-4fb5-8efb-8458e626e97d-host\") pod \"crc-debug-xlngs\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.159562 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzmzc\" (UniqueName: \"kubernetes.io/projected/cd552ad5-ead8-4fb5-8efb-8458e626e97d-kube-api-access-xzmzc\") pod \"crc-debug-xlngs\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: I0130 22:32:28.222082 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:28 crc kubenswrapper[4721]: W0130 22:32:28.251692 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd552ad5_ead8_4fb5_8efb_8458e626e97d.slice/crio-ad54faff873137c39efd83cb4e489628eb92dd819fb2a2258f5ee9a6be17a603 WatchSource:0}: Error finding container ad54faff873137c39efd83cb4e489628eb92dd819fb2a2258f5ee9a6be17a603: Status 404 returned error can't find the container with id ad54faff873137c39efd83cb4e489628eb92dd819fb2a2258f5ee9a6be17a603 Jan 30 22:32:29 crc kubenswrapper[4721]: I0130 22:32:29.092640 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:32:29 crc kubenswrapper[4721]: E0130 22:32:29.093489 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:32:29 crc kubenswrapper[4721]: I0130 22:32:29.201077 4721 generic.go:334] "Generic (PLEG): container finished" podID="cd552ad5-ead8-4fb5-8efb-8458e626e97d" containerID="a4df22835194b99b3db2e2b4f2acd49ee7878474f68e2d7d8a64418c58ef43e0" exitCode=0 Jan 30 22:32:29 crc kubenswrapper[4721]: I0130 22:32:29.201180 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-xlngs" event={"ID":"cd552ad5-ead8-4fb5-8efb-8458e626e97d","Type":"ContainerDied","Data":"a4df22835194b99b3db2e2b4f2acd49ee7878474f68e2d7d8a64418c58ef43e0"} Jan 30 22:32:29 crc kubenswrapper[4721]: I0130 22:32:29.201514 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/crc-debug-xlngs" event={"ID":"cd552ad5-ead8-4fb5-8efb-8458e626e97d","Type":"ContainerStarted","Data":"ad54faff873137c39efd83cb4e489628eb92dd819fb2a2258f5ee9a6be17a603"} Jan 30 22:32:29 crc kubenswrapper[4721]: I0130 22:32:29.240264 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2mv76/crc-debug-xlngs"] Jan 30 22:32:29 crc kubenswrapper[4721]: I0130 22:32:29.250162 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2mv76/crc-debug-xlngs"] Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.312104 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.489225 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cd552ad5-ead8-4fb5-8efb-8458e626e97d-host\") pod \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.489345 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cd552ad5-ead8-4fb5-8efb-8458e626e97d-host" (OuterVolumeSpecName: "host") pod "cd552ad5-ead8-4fb5-8efb-8458e626e97d" (UID: "cd552ad5-ead8-4fb5-8efb-8458e626e97d"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.489406 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzmzc\" (UniqueName: \"kubernetes.io/projected/cd552ad5-ead8-4fb5-8efb-8458e626e97d-kube-api-access-xzmzc\") pod \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\" (UID: \"cd552ad5-ead8-4fb5-8efb-8458e626e97d\") " Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.490294 4721 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cd552ad5-ead8-4fb5-8efb-8458e626e97d-host\") on node \"crc\" DevicePath \"\"" Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.498925 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd552ad5-ead8-4fb5-8efb-8458e626e97d-kube-api-access-xzmzc" (OuterVolumeSpecName: "kube-api-access-xzmzc") pod "cd552ad5-ead8-4fb5-8efb-8458e626e97d" (UID: "cd552ad5-ead8-4fb5-8efb-8458e626e97d"). InnerVolumeSpecName "kube-api-access-xzmzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:32:30 crc kubenswrapper[4721]: I0130 22:32:30.592047 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzmzc\" (UniqueName: \"kubernetes.io/projected/cd552ad5-ead8-4fb5-8efb-8458e626e97d-kube-api-access-xzmzc\") on node \"crc\" DevicePath \"\"" Jan 30 22:32:31 crc kubenswrapper[4721]: I0130 22:32:31.222968 4721 scope.go:117] "RemoveContainer" containerID="a4df22835194b99b3db2e2b4f2acd49ee7878474f68e2d7d8a64418c58ef43e0" Jan 30 22:32:31 crc kubenswrapper[4721]: I0130 22:32:31.223156 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/crc-debug-xlngs" Jan 30 22:32:32 crc kubenswrapper[4721]: I0130 22:32:32.110685 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd552ad5-ead8-4fb5-8efb-8458e626e97d" path="/var/lib/kubelet/pods/cd552ad5-ead8-4fb5-8efb-8458e626e97d/volumes" Jan 30 22:32:42 crc kubenswrapper[4721]: I0130 22:32:42.100616 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:32:42 crc kubenswrapper[4721]: E0130 22:32:42.101400 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:32:54 crc kubenswrapper[4721]: I0130 22:32:54.092564 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:32:54 crc kubenswrapper[4721]: E0130 22:32:54.093436 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.049773 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pbrzx"] 
Jan 30 22:33:04 crc kubenswrapper[4721]: E0130 22:33:04.050933 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd552ad5-ead8-4fb5-8efb-8458e626e97d" containerName="container-00"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.050955 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd552ad5-ead8-4fb5-8efb-8458e626e97d" containerName="container-00"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.051182 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd552ad5-ead8-4fb5-8efb-8458e626e97d" containerName="container-00"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.053716 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.110520 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pbrzx"]
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.119639 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-catalog-content\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.119826 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75s2h\" (UniqueName: \"kubernetes.io/projected/8173627e-5b0d-47e2-a5fd-cef57219a826-kube-api-access-75s2h\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.119906 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-utilities\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.221720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-catalog-content\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.221961 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75s2h\" (UniqueName: \"kubernetes.io/projected/8173627e-5b0d-47e2-a5fd-cef57219a826-kube-api-access-75s2h\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.222036 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-utilities\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.222409 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-catalog-content\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.222468 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-utilities\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.242784 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75s2h\" (UniqueName: \"kubernetes.io/projected/8173627e-5b0d-47e2-a5fd-cef57219a826-kube-api-access-75s2h\") pod \"redhat-operators-pbrzx\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") " pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.388818 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:04 crc kubenswrapper[4721]: I0130 22:33:04.928559 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pbrzx"]
Jan 30 22:33:04 crc kubenswrapper[4721]: W0130 22:33:04.951486 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8173627e_5b0d_47e2_a5fd_cef57219a826.slice/crio-60be9b51d08a9fac9163444d59ddbb6e0e3d268b58eb2941af7f9ad201d00267 WatchSource:0}: Error finding container 60be9b51d08a9fac9163444d59ddbb6e0e3d268b58eb2941af7f9ad201d00267: Status 404 returned error can't find the container with id 60be9b51d08a9fac9163444d59ddbb6e0e3d268b58eb2941af7f9ad201d00267
Jan 30 22:33:05 crc kubenswrapper[4721]: I0130 22:33:05.567291 4721 generic.go:334] "Generic (PLEG): container finished" podID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerID="aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583" exitCode=0
Jan 30 22:33:05 crc kubenswrapper[4721]: I0130 22:33:05.567761 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerDied","Data":"aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583"}
Jan 30 22:33:05 crc kubenswrapper[4721]: I0130 22:33:05.567847 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerStarted","Data":"60be9b51d08a9fac9163444d59ddbb6e0e3d268b58eb2941af7f9ad201d00267"}
Jan 30 22:33:06 crc kubenswrapper[4721]: I0130 22:33:06.585210 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerStarted","Data":"830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3"}
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.092775 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4"
Jan 30 22:33:09 crc kubenswrapper[4721]: E0130 22:33:09.093585 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.096525 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/init-config-reloader/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.299273 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/alertmanager/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.322344 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/config-reloader/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.492830 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-68b6df574b-x89tq_e418bacc-47a2-45cd-9bb3-35e42563c482/barbican-api/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.525260 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-68b6df574b-x89tq_e418bacc-47a2-45cd-9bb3-35e42563c482/barbican-api-log/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.710053 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6db69d5bd6-grvs7_925d81d0-2e07-4a27-a8d1-7edff62fe070/barbican-keystone-listener/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.852228 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6db69d5bd6-grvs7_925d81d0-2e07-4a27-a8d1-7edff62fe070/barbican-keystone-listener-log/0.log"
Jan 30 22:33:09 crc kubenswrapper[4721]: I0130 22:33:09.893950 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6456f55dbc-h7p5b_7457bf9f-e5dd-47af-9346-898a62273a3a/barbican-worker/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.124530 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87b1d1f9-cf16-401a-b55d-a6d2434e0284/init-config-reloader/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.128601 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6456f55dbc-h7p5b_7457bf9f-e5dd-47af-9346-898a62273a3a/barbican-worker-log/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.204099 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-4w9wr_0093f639-dd37-4e8d-86da-c6149cb3a4c4/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.377706 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/ceilometer-notification-agent/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.409054 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/ceilometer-central-agent/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.418730 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/proxy-httpd/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.576397 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c3d1003c-e848-4ff5-a27e-f1cff2e8162c/sg-core/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.634596 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e690f08-a69f-4b8a-9698-f66afbf94f43/cinder-api/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.655650 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e690f08-a69f-4b8a-9698-f66afbf94f43/cinder-api-log/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.884546 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6b2e94bb-fd95-448e-8ab0-b79d741fd7f5/probe/0.log"
Jan 30 22:33:10 crc kubenswrapper[4721]: I0130 22:33:10.919927 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6b2e94bb-fd95-448e-8ab0-b79d741fd7f5/cinder-scheduler/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.109183 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_41460120-522d-44cf-a772-29cb623f9c14/cloudkitty-api-log/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.215995 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_1a3aadfd-1e26-407f-98a8-c3f5681c2126/loki-compactor/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.232998 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_41460120-522d-44cf-a772-29cb623f9c14/cloudkitty-api/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.441100 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-66dfd9bb-s89nx_f7847558-f6b6-4f0a-8fd3-45e0fef7fce4/loki-distributor/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.510633 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-7db4f4db8c-2plvn_9d09968c-71f8-4bad-855b-ebc5abb78989/gateway/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.684705 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-7db4f4db8c-9wfdz_a6518545-fdf0-4445-8e62-d7ca4816779d/gateway/0.log"
Jan 30 22:33:11 crc kubenswrapper[4721]: I0130 22:33:11.836124 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_ae53bce6-479b-4d55-9fb5-2441850bec4a/loki-index-gateway/0.log"
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.013042 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_3aba9e5f-541b-42d2-9cd4-6a6ad04bcbc3/loki-ingester/0.log"
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.266226 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-795fd8f8cc-csn7z_3b976659-d481-4cd4-b1b1-72a7d465067d/loki-querier/0.log"
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.368276 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-5cd44666df-rkl6r_6e77f4bd-bf5d-4043-ae9e-e938a4e99b69/loki-query-frontend/0.log"
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.634990 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-n8gnj_48a8b210-aaab-46b3-8436-f4acab16a60a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.641458 4721 generic.go:334] "Generic (PLEG): container finished" podID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerID="830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3" exitCode=0
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.641499 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerDied","Data":"830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3"}
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.877719 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-dg7wk_aeb46845-60c0-48ae-960e-4f138a1caf5e/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:12 crc kubenswrapper[4721]: I0130 22:33:12.991901 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-4gj7m_94aad825-d42e-410a-a415-ff4e1910ecdb/init/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.244860 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-4gj7m_94aad825-d42e-410a-a415-ff4e1910ecdb/init/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.380387 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-rhnq6_ba5cfe2a-1f42-45e4-b0fd-a6e42e17c665/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.383136 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-4gj7m_94aad825-d42e-410a-a415-ff4e1910ecdb/dnsmasq-dns/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.657341 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerStarted","Data":"73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2"}
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.687207 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pbrzx" podStartSLOduration=2.051452448 podStartE2EDuration="9.68718591s" podCreationTimestamp="2026-01-30 22:33:04 +0000 UTC" firstStartedPulling="2026-01-30 22:33:05.572607143 +0000 UTC m=+4574.364508419" lastFinishedPulling="2026-01-30 22:33:13.208340635 +0000 UTC m=+4582.000241881" observedRunningTime="2026-01-30 22:33:13.677374412 +0000 UTC m=+4582.469275678" watchObservedRunningTime="2026-01-30 22:33:13.68718591 +0000 UTC m=+4582.479087156"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.738202 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_acb7c332-79bc-432b-b046-248772221388/glance-httpd/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.738693 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_acb7c332-79bc-432b-b046-248772221388/glance-log/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.919868 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9/glance-httpd/0.log"
Jan 30 22:33:13 crc kubenswrapper[4721]: I0130 22:33:13.953551 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8996ce6f-c6f1-43e9-9aaa-12ea40c56bc9/glance-log/0.log"
Jan 30 22:33:14 crc kubenswrapper[4721]: I0130 22:33:14.073233 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-5b4mr_f2c051d1-6a5d-4950-953d-204cc6adfc6e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:14 crc kubenswrapper[4721]: I0130 22:33:14.388994 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:14 crc kubenswrapper[4721]: I0130 22:33:14.389035 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:14 crc kubenswrapper[4721]: I0130 22:33:14.934400 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-p4zrm_d1bfbef9-b785-4687-a0fa-471a6b4b6957/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:15 crc kubenswrapper[4721]: I0130 22:33:15.156749 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29496841-6wgsv_33f22249-bfaa-4818-a56c-2d0192a8bef6/keystone-cron/0.log"
Jan 30 22:33:15 crc kubenswrapper[4721]: I0130 22:33:15.342213 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-54b7957dff-42xvx_e94a796b-4938-444f-811d-dbba68141f41/keystone-api/0.log"
Jan 30 22:33:15 crc kubenswrapper[4721]: I0130 22:33:15.374474 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_b8e82608-6b61-42c4-b4fc-6f1fe545e119/kube-state-metrics/0.log"
Jan 30 22:33:15 crc kubenswrapper[4721]: I0130 22:33:15.501690 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-bx7d4_75dd1a6a-3fe9-4016-bdb1-bbc9ec572417/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:15 crc kubenswrapper[4721]: I0130 22:33:15.679369 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pbrzx" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server" probeResult="failure" output=<
Jan 30 22:33:15 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Jan 30 22:33:15 crc kubenswrapper[4721]: >
Jan 30 22:33:16 crc kubenswrapper[4721]: I0130 22:33:16.115952 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7cccf5fc8f-zbdml_96081b17-acc8-4700-91da-9a966b7e7f1c/neutron-httpd/0.log"
Jan 30 22:33:16 crc kubenswrapper[4721]: I0130 22:33:16.192752 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7cccf5fc8f-zbdml_96081b17-acc8-4700-91da-9a966b7e7f1c/neutron-api/0.log"
Jan 30 22:33:16 crc kubenswrapper[4721]: I0130 22:33:16.774540 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-59n7k_433d1a2c-a03e-483a-9dba-2adde950cf1f/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:17 crc kubenswrapper[4721]: I0130 22:33:17.440684 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_03dc15ed-69f7-4a98-b586-a7e051ba2bbe/nova-api-log/0.log"
Jan 30 22:33:17 crc kubenswrapper[4721]: I0130 22:33:17.685113 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c01ff72c-19ac-4ae7-8c07-d9e0c01c669f/nova-cell0-conductor-conductor/0.log"
Jan 30 22:33:17 crc kubenswrapper[4721]: I0130 22:33:17.728792 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_03dc15ed-69f7-4a98-b586-a7e051ba2bbe/nova-api-api/0.log"
Jan 30 22:33:18 crc kubenswrapper[4721]: I0130 22:33:18.073880 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_55612f9b-a463-4acf-9f8a-647372b6c4a0/nova-cell1-novncproxy-novncproxy/0.log"
Jan 30 22:33:18 crc kubenswrapper[4721]: I0130 22:33:18.111056 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_0ca8e7a1-e433-4c9a-9532-f695fedd853e/nova-cell1-conductor-conductor/0.log"
Jan 30 22:33:18 crc kubenswrapper[4721]: I0130 22:33:18.537074 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-g4xdp_e148581e-1ed2-4532-a179-f1491d58dc0e/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Jan 30 22:33:18 crc kubenswrapper[4721]: I0130 22:33:18.825880 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_766e6806-c4e1-4db9-9c4e-93a466d182f1/nova-metadata-log/0.log"
Jan 30 22:33:19 crc kubenswrapper[4721]: I0130 22:33:19.466239 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8bff96ff-2424-4622-8c4d-d866a4b28b21/mysql-bootstrap/0.log"
Jan 30 22:33:19 crc kubenswrapper[4721]: I0130 22:33:19.549856 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_2e8d2389-e04d-4427-8e1e-ef0d8617a29f/nova-scheduler-scheduler/0.log"
Jan 30 22:33:19 crc kubenswrapper[4721]: I0130 22:33:19.816905 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8bff96ff-2424-4622-8c4d-d866a4b28b21/galera/0.log"
Jan 30 22:33:19 crc kubenswrapper[4721]: I0130 22:33:19.871078 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8bff96ff-2424-4622-8c4d-d866a4b28b21/mysql-bootstrap/0.log"
Jan 30 22:33:20 crc kubenswrapper[4721]: I0130 22:33:20.132450 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_96303720-27c1-495f-8597-5891c08c5e06/mysql-bootstrap/0.log"
Jan 30 22:33:20 crc kubenswrapper[4721]: I0130 22:33:20.333606 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_96303720-27c1-495f-8597-5891c08c5e06/mysql-bootstrap/0.log"
Jan 30 22:33:20 crc kubenswrapper[4721]: I0130 22:33:20.404842 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_96303720-27c1-495f-8597-5891c08c5e06/galera/0.log"
Jan 30 22:33:20 crc kubenswrapper[4721]: I0130 22:33:20.684000 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_fc69e094-e84a-44d5-9a2c-726bac11b1c2/openstackclient/0.log"
Jan 30 22:33:20 crc kubenswrapper[4721]: I0130 22:33:20.959598 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4k958_522b5333-a647-446e-a261-b1828a1d20a3/ovn-controller/0.log"
Jan 30 22:33:20 crc kubenswrapper[4721]: I0130 22:33:20.989076 4721 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_nova-metadata-0_766e6806-c4e1-4db9-9c4e-93a466d182f1/nova-metadata-metadata/0.log" Jan 30 22:33:21 crc kubenswrapper[4721]: I0130 22:33:21.195616 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-fmzsr_133e2a95-0b74-4b44-9ea1-d6a37d548876/openstack-network-exporter/0.log" Jan 30 22:33:21 crc kubenswrapper[4721]: I0130 22:33:21.449211 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovsdb-server-init/0.log" Jan 30 22:33:21 crc kubenswrapper[4721]: I0130 22:33:21.708510 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovsdb-server-init/0.log" Jan 30 22:33:21 crc kubenswrapper[4721]: I0130 22:33:21.743018 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovs-vswitchd/0.log" Jan 30 22:33:21 crc kubenswrapper[4721]: I0130 22:33:21.745713 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8mqsj_b0f6d373-29bd-47a5-8cf5-3937fbc1498f/ovsdb-server/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.106864 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qbsgd_0bd6ab29-7eb6-4f2a-ad3f-0992ba77ab86/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.267216 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cbab3069-54ee-4146-b912-5e59c0039f86/openstack-network-exporter/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.314678 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cbab3069-54ee-4146-b912-5e59c0039f86/ovn-northd/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.462282 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_63a5898f-2b47-44bb-85a0-1700940899c1/openstack-network-exporter/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.562967 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_63a5898f-2b47-44bb-85a0-1700940899c1/ovsdbserver-nb/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.747247 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_796cb9cb-aad7-4645-89ae-ae8764bfbe17/openstack-network-exporter/0.log" Jan 30 22:33:22 crc kubenswrapper[4721]: I0130 22:33:22.823577 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_796cb9cb-aad7-4645-89ae-ae8764bfbe17/ovsdbserver-sb/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.087021 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5c759f49d6-k22ln_9def88bd-3017-4ea9-8a12-b895aeb4b28f/placement-api/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.093253 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:33:23 crc kubenswrapper[4721]: E0130 22:33:23.093551 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.217783 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5c759f49d6-k22ln_9def88bd-3017-4ea9-8a12-b895aeb4b28f/placement-log/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.378549 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/init-config-reloader/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.591090 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/init-config-reloader/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.644152 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/config-reloader/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.659957 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/prometheus/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.814776 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_42159633-a347-4843-9639-6e346cee733e/thanos-sidecar/0.log" Jan 30 22:33:23 crc kubenswrapper[4721]: I0130 22:33:23.917371 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_12f1cce2-7b07-4519-b1c3-15e57ed44cde/setup-container/0.log" Jan 30 22:33:24 crc kubenswrapper[4721]: I0130 22:33:24.192909 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_12f1cce2-7b07-4519-b1c3-15e57ed44cde/rabbitmq/0.log" Jan 30 22:33:24 crc kubenswrapper[4721]: I0130 22:33:24.259589 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_12f1cce2-7b07-4519-b1c3-15e57ed44cde/setup-container/0.log" Jan 30 22:33:24 crc kubenswrapper[4721]: I0130 22:33:24.660561 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7ccec6ec-8034-4a0f-88a6-b86751e0f22b/setup-container/0.log" Jan 30 22:33:25 crc kubenswrapper[4721]: I0130 22:33:25.464474 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_0a596943-21b2-4c3d-9687-150ce3bde8f7/cloudkitty-proc/0.log" Jan 30 22:33:25 crc kubenswrapper[4721]: I0130 22:33:25.476664 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pbrzx" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server" probeResult="failure" output=< Jan 30 22:33:25 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 22:33:25 crc kubenswrapper[4721]: > Jan 30 22:33:25 crc kubenswrapper[4721]: I0130 22:33:25.541664 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7ccec6ec-8034-4a0f-88a6-b86751e0f22b/setup-container/0.log" Jan 30 22:33:25 crc kubenswrapper[4721]: I0130 22:33:25.624212 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7ccec6ec-8034-4a0f-88a6-b86751e0f22b/rabbitmq/0.log" Jan 30 22:33:25 crc kubenswrapper[4721]: I0130 
22:33:25.762859 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-bd7kk_ffae7484-d197-4caa-8553-151666fded73/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:25 crc kubenswrapper[4721]: I0130 22:33:25.804862 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-lf29q_97ad0a5f-02dd-48d9-93a4-b7c7b9462879/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.045647 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-7qfn2_afddf697-9175-4a72-8226-bcb7030604f9/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.051894 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-jt6rq_982c1f39-3c88-4f1b-a5ea-4db039e1201e/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.459358 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-4989x_c878af65-d3fd-4eae-9818-a30e27c363ec/ssh-known-hosts-edpm-deployment/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.631438 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-746946b9f5-f7fdd_46e77d9a-8263-4821-be29-a13929dd4448/proxy-server/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.657052 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-746946b9f5-f7fdd_46e77d9a-8263-4821-be29-a13929dd4448/proxy-httpd/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.776141 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hkfr7_81c15104-7d30-43d8-9e3d-9ab1834959da/swift-ring-rebalance/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.927070 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-auditor/0.log" Jan 30 22:33:26 crc kubenswrapper[4721]: I0130 22:33:26.993536 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-reaper/0.log" Jan 30 22:33:27 crc kubenswrapper[4721]: I0130 22:33:27.069836 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-replicator/0.log" Jan 30 22:33:27 crc kubenswrapper[4721]: I0130 22:33:27.907986 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-auditor/0.log" Jan 30 22:33:27 crc kubenswrapper[4721]: I0130 22:33:27.917348 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-replicator/0.log" Jan 30 22:33:27 crc kubenswrapper[4721]: I0130 22:33:27.938922 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-server/0.log" Jan 30 22:33:27 crc kubenswrapper[4721]: I0130 22:33:27.958021 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/account-server/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.120591 4721 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/container-updater/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.198312 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-replicator/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.199926 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-auditor/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.246067 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-expirer/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.411641 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-server/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.442973 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/object-updater/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.498266 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/rsync/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.514396 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fbc194de-ea06-4d56-a35a-4b63a46651df/swift-recon-cron/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.766740 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-xxkgj_bb52513c-6253-41f2-aa93-808d6b9cbb62/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:28 crc kubenswrapper[4721]: I0130 22:33:28.895319 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_519f6e4a-b90c-4146-8ac9-d03854442bdd/tempest-tests-tempest-tests-runner/0.log" Jan 30 22:33:29 crc kubenswrapper[4721]: I0130 22:33:29.115844 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_177f7165-d126-4742-a778-e5e845c54fab/test-operator-logs-container/0.log" Jan 30 22:33:29 crc kubenswrapper[4721]: I0130 22:33:29.280266 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-5np9m_8a1c37f6-c659-4344-ad91-49f56d8fd843/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 30 22:33:35 crc kubenswrapper[4721]: I0130 22:33:35.445939 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pbrzx" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server" probeResult="failure" output=< Jan 30 22:33:35 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 22:33:35 crc kubenswrapper[4721]: > Jan 30 22:33:35 crc kubenswrapper[4721]: I0130 22:33:35.466508 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7548d1d0-371f-4bf8-a557-a9734c49a52e/memcached/0.log" Jan 30 22:33:36 crc kubenswrapper[4721]: I0130 22:33:36.092221 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:33:36 crc kubenswrapper[4721]: E0130 
Jan 30 22:33:44 crc kubenswrapper[4721]: I0130 22:33:44.440213 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:44 crc kubenswrapper[4721]: I0130 22:33:44.495029 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:44 crc kubenswrapper[4721]: I0130 22:33:44.690970 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pbrzx"]
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.021431 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pbrzx" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server" containerID="cri-o://73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2" gracePeriod=2
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.740686 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.837345 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75s2h\" (UniqueName: \"kubernetes.io/projected/8173627e-5b0d-47e2-a5fd-cef57219a826-kube-api-access-75s2h\") pod \"8173627e-5b0d-47e2-a5fd-cef57219a826\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") "
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.837468 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-catalog-content\") pod \"8173627e-5b0d-47e2-a5fd-cef57219a826\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") "
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.837602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-utilities\") pod \"8173627e-5b0d-47e2-a5fd-cef57219a826\" (UID: \"8173627e-5b0d-47e2-a5fd-cef57219a826\") "
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.838639 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-utilities" (OuterVolumeSpecName: "utilities") pod "8173627e-5b0d-47e2-a5fd-cef57219a826" (UID: "8173627e-5b0d-47e2-a5fd-cef57219a826"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.940314 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 22:33:46 crc kubenswrapper[4721]: I0130 22:33:46.964593 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8173627e-5b0d-47e2-a5fd-cef57219a826" (UID: "8173627e-5b0d-47e2-a5fd-cef57219a826"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.034636 4721 generic.go:334] "Generic (PLEG): container finished" podID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerID="73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2" exitCode=0
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.034677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerDied","Data":"73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2"}
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.034703 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbrzx" event={"ID":"8173627e-5b0d-47e2-a5fd-cef57219a826","Type":"ContainerDied","Data":"60be9b51d08a9fac9163444d59ddbb6e0e3d268b58eb2941af7f9ad201d00267"}
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.034719 4721 scope.go:117] "RemoveContainer" containerID="73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.035733 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbrzx"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.042810 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173627e-5b0d-47e2-a5fd-cef57219a826-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.056388 4721 scope.go:117] "RemoveContainer" containerID="830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.415315 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8173627e-5b0d-47e2-a5fd-cef57219a826-kube-api-access-75s2h" (OuterVolumeSpecName: "kube-api-access-75s2h") pod "8173627e-5b0d-47e2-a5fd-cef57219a826" (UID: "8173627e-5b0d-47e2-a5fd-cef57219a826"). InnerVolumeSpecName "kube-api-access-75s2h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.441133 4721 scope.go:117] "RemoveContainer" containerID="aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.451959 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75s2h\" (UniqueName: \"kubernetes.io/projected/8173627e-5b0d-47e2-a5fd-cef57219a826-kube-api-access-75s2h\") on node \"crc\" DevicePath \"\""
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.617967 4721 scope.go:117] "RemoveContainer" containerID="73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2"
Jan 30 22:33:47 crc kubenswrapper[4721]: E0130 22:33:47.618555 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2\": container with ID starting with 73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2 not found: ID does not exist" containerID="73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.618585 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2"} err="failed to get container status \"73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2\": rpc error: code = NotFound desc = could not find container \"73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2\": container with ID starting with 73c442aa50e27204fea267be9f20cdd0b89fe0f59a7b177d1f7833ad85bf22d2 not found: ID does not exist"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.618606 4721 scope.go:117] "RemoveContainer" containerID="830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3"
Jan 30 22:33:47 crc kubenswrapper[4721]: E0130 22:33:47.619041 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3\": container with ID starting with 830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3 not found: ID does not exist" containerID="830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.619066 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3"} err="failed to get container status \"830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3\": rpc error: code = NotFound desc = could not find container \"830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3\": container with ID starting with 830268b6675f1aa4c44abc36e6153325351b9a193b45ff3efc07fed604559fd3 not found: ID does not exist"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.619078 4721 scope.go:117] "RemoveContainer" containerID="aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583"
Jan 30 22:33:47 crc kubenswrapper[4721]: E0130 22:33:47.619495 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583\": container with ID starting with aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583 not found: ID does not exist" containerID="aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.619554 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583"} err="failed to get container status \"aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583\": rpc error: code = NotFound desc = could not find container \"aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583\": container with ID starting with aba252a3764c4704b2dbae43b5353af3f1e46ff9351aedaf3be797116f04b583 not found: ID does not exist"
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.676820 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pbrzx"]
Jan 30 22:33:47 crc kubenswrapper[4721]: I0130 22:33:47.686754 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pbrzx"]
Jan 30 22:33:48 crc kubenswrapper[4721]: I0130 22:33:48.105151 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" path="/var/lib/kubelet/pods/8173627e-5b0d-47e2-a5fd-cef57219a826/volumes"
Jan 30 22:33:49 crc kubenswrapper[4721]: I0130 22:33:49.093065 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4"
Jan 30 22:33:49 crc kubenswrapper[4721]: E0130 22:33:49.094215 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11"
Jan 30 22:33:59 crc kubenswrapper[4721]: I0130 22:33:59.379096 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/util/0.log"
Jan 30 22:33:59 crc kubenswrapper[4721]: I0130 22:33:59.516103 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/util/0.log"
Jan 30 22:33:59 crc kubenswrapper[4721]: I0130 22:33:59.585305 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/pull/0.log"
Jan 30 22:33:59 crc kubenswrapper[4721]: I0130 22:33:59.592763 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/pull/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.119614 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/util/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.159000 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/extract/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.182007 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3313ea0756d90924df18e6547a3b99ca0fd23e0eaabfca89becdd7bc0gwbth_12495753-1318-435a-b2c4-33b9f35ba86b/pull/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.415351 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-qhpvg_ce8df3e5-ac5d-4782-97fe-b49e9342768a/manager/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.478207 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-ddmz2_6dfaa0a8-aa69-4d52-8740-b1098802644c/manager/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.628703 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-77cms_e5aed1e3-eebf-4e1b-ab1b-1b81b337374e/manager/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.707024 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-snvcj_2dbdf4c9-4962-45ea-ac32-adbb848529d7/manager/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.864727 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-fvxk2_6a148cdc-0a77-4f57-b5e6-1b2acf90a900/manager/0.log"
Jan 30 22:34:00 crc kubenswrapper[4721]: I0130 22:34:00.897703 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-m79fw_b6d746e4-3768-42df-956a-c700072e4e4c/manager/0.log"
Jan 30 22:34:01 crc kubenswrapper[4721]: I0130 22:34:01.167076 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-mbz59_782f1962-bc39-4162-84ae-acad49911f45/manager/0.log"
Jan 30 22:34:01 crc kubenswrapper[4721]: I0130 22:34:01.368941 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-r42gp_b10d3fdb-7237-4461-ba03-ed926092791f/manager/0.log"
Jan 30 22:34:01 crc kubenswrapper[4721]: I0130 22:34:01.501393 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-jztwj_7d5fad49-066d-48d6-a9f0-0c3a105df525/manager/0.log"
Jan 30 22:34:01 crc kubenswrapper[4721]: I0130 22:34:01.504573 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-scnz2_64d5e6e7-b654-4060-9ba5-82e52e172a3b/manager/0.log"
Jan 30 22:34:01 crc kubenswrapper[4721]: I0130 22:34:01.688942 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-6qxsd_e7dd1fd7-f720-45b2-86b4-bc056b1ef360/manager/0.log"
Jan 30 22:34:01 crc kubenswrapper[4721]: I0130 22:34:01.795979 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-j5m4f_319096ad-d67b-4344-8bb2-290aafd57bc0/manager/0.log"
Jan 30 22:34:02 crc kubenswrapper[4721]: I0130 22:34:02.019642 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-qhqfv_3d3b8ade-729b-4dfc-9ae8-ead1999f9657/manager/0.log"
Jan 30 22:34:02 crc kubenswrapper[4721]: I0130 22:34:02.084143 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-2cmth_c53f8f24-7f92-4255-ad09-8a729b4159ab/manager/0.log"
Jan 30 22:34:02 crc kubenswrapper[4721]: I0130 22:34:02.225239 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4d76hgf_34543b19-ae6c-4a39-ad40-0dff196f0fd6/manager/0.log"
Jan 30 22:34:02 crc kubenswrapper[4721]: I0130 22:34:02.420968 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-8584c7c99f-224n7_16ce971e-b375-4472-bbf9-6310b8524952/operator/0.log"
Jan 30 22:34:02 crc kubenswrapper[4721]: I0130 22:34:02.707739 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-hvf87_c9bf578a-adbc-4168-a09b-edf084023bfa/registry-server/0.log"
Jan 30 22:34:02 crc kubenswrapper[4721]: I0130 22:34:02.888327 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-x44jj_5be9ffe8-a1a4-4aa1-a704-5443e1ef640b/manager/0.log"
Jan 30 22:34:03 crc kubenswrapper[4721]: I0130 22:34:03.165116 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-kgpcd_d4433a61-fd64-4240-8a12-8d86a8a52e77/manager/0.log"
Jan 30 22:34:03 crc kubenswrapper[4721]: I0130 22:34:03.253897 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4r7sw_bca228e2-5d0f-415b-943b-530f9291396a/operator/0.log"
Jan 30 22:34:03 crc kubenswrapper[4721]: I0130 22:34:03.412943 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-6ptfc_d9650011-7842-4b52-bf3b-728e40294cb4/manager/0.log"
Jan 30 22:34:03 crc kubenswrapper[4721]: I0130 22:34:03.681086 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-jktzs_6e34b8b8-6fc5-4444-b957-b6325671ec2a/manager/0.log"
Jan 30 22:34:03 crc kubenswrapper[4721]: I0130 22:34:03.818557 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-57c48854c9-4r8wb_1b4a95f6-61df-4aef-b9aa-d9a1ab40f11c/manager/0.log"
Jan 30 22:34:03 crc kubenswrapper[4721]: I0130 22:34:03.889777 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-c2c8k_e11d1820-45a9-4ecc-b400-7bbcb6f8b69e/manager/0.log"
Jan 30 22:34:04 crc kubenswrapper[4721]: I0130 22:34:04.037627 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-fd77b8dd7-4t9rw_54f2f57d-0269-4ba8-94f5-04873f29e16c/manager/0.log"
Jan 30 22:34:04 crc kubenswrapper[4721]: I0130 22:34:04.092882 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4"
Jan 30 22:34:05 crc kubenswrapper[4721]: I0130 22:34:05.209032 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"48ebdad53971c68c6e5ffb83001b2f424ee906a261bf938298acaf660ae7162f"}
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.105438 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vg82x"]
Jan 30 22:34:08 crc kubenswrapper[4721]: E0130 22:34:08.106528 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="extract-content"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.106542 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="extract-content"
Jan 30 22:34:08 crc kubenswrapper[4721]: E0130 22:34:08.106564 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="extract-utilities"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.106573 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="extract-utilities"
Jan 30 22:34:08 crc kubenswrapper[4721]: E0130 22:34:08.106582 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.106588 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.106853 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8173627e-5b0d-47e2-a5fd-cef57219a826" containerName="registry-server"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.108669 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.118878 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vg82x"]
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.195175 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fm4t\" (UniqueName: \"kubernetes.io/projected/5ba15f0e-8695-42f8-92ed-19deed96b442-kube-api-access-8fm4t\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.196522 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-catalog-content\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.196676 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-utilities\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.298691 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-catalog-content\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.298781 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-utilities\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.298883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fm4t\" (UniqueName: \"kubernetes.io/projected/5ba15f0e-8695-42f8-92ed-19deed96b442-kube-api-access-8fm4t\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.299229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-catalog-content\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.299370 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-utilities\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.515705 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fm4t\" (UniqueName: \"kubernetes.io/projected/5ba15f0e-8695-42f8-92ed-19deed96b442-kube-api-access-8fm4t\") pod \"redhat-marketplace-vg82x\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " pod="openshift-marketplace/redhat-marketplace-vg82x"
Jan 30 22:34:08 crc kubenswrapper[4721]: I0130 22:34:08.729657 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vg82x"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:09 crc kubenswrapper[4721]: I0130 22:34:09.218405 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vg82x"] Jan 30 22:34:09 crc kubenswrapper[4721]: I0130 22:34:09.268247 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerStarted","Data":"8634bb19c262879035b8e46644a94d743021dfb4b1bc8c827ae98a67ab133ec9"} Jan 30 22:34:10 crc kubenswrapper[4721]: I0130 22:34:10.280581 4721 generic.go:334] "Generic (PLEG): container finished" podID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerID="9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d" exitCode=0 Jan 30 22:34:10 crc kubenswrapper[4721]: I0130 22:34:10.280906 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerDied","Data":"9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d"} Jan 30 22:34:11 crc kubenswrapper[4721]: I0130 22:34:11.292833 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerStarted","Data":"fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557"} Jan 30 22:34:12 crc kubenswrapper[4721]: I0130 22:34:12.307367 4721 generic.go:334] "Generic (PLEG): container finished" podID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerID="fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557" exitCode=0 Jan 30 22:34:12 crc kubenswrapper[4721]: I0130 22:34:12.307449 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerDied","Data":"fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557"} Jan 30 22:34:13 crc kubenswrapper[4721]: I0130 22:34:13.320826 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerStarted","Data":"92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f"} Jan 30 22:34:13 crc kubenswrapper[4721]: I0130 22:34:13.344355 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vg82x" podStartSLOduration=2.873039021 podStartE2EDuration="5.34433872s" podCreationTimestamp="2026-01-30 22:34:08 +0000 UTC" firstStartedPulling="2026-01-30 22:34:10.282565757 +0000 UTC m=+4639.074467003" lastFinishedPulling="2026-01-30 22:34:12.753865456 +0000 UTC m=+4641.545766702" observedRunningTime="2026-01-30 22:34:13.341488551 +0000 UTC m=+4642.133389797" watchObservedRunningTime="2026-01-30 22:34:13.34433872 +0000 UTC m=+4642.136239966" Jan 30 22:34:18 crc kubenswrapper[4721]: I0130 22:34:18.729740 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:18 crc kubenswrapper[4721]: I0130 22:34:18.730355 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:19 crc kubenswrapper[4721]: I0130 22:34:19.192844 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:19 crc kubenswrapper[4721]: I0130 22:34:19.434987 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:20 crc kubenswrapper[4721]: I0130 22:34:20.281900 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vg82x"] Jan 30 22:34:21 crc kubenswrapper[4721]: I0130 22:34:21.398522 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vg82x" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="registry-server" containerID="cri-o://92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f" gracePeriod=2 Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.057254 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.193069 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-catalog-content\") pod \"5ba15f0e-8695-42f8-92ed-19deed96b442\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.193247 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-utilities\") pod \"5ba15f0e-8695-42f8-92ed-19deed96b442\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.193330 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fm4t\" (UniqueName: \"kubernetes.io/projected/5ba15f0e-8695-42f8-92ed-19deed96b442-kube-api-access-8fm4t\") pod \"5ba15f0e-8695-42f8-92ed-19deed96b442\" (UID: \"5ba15f0e-8695-42f8-92ed-19deed96b442\") " Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.195475 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-utilities" (OuterVolumeSpecName: "utilities") pod "5ba15f0e-8695-42f8-92ed-19deed96b442" (UID: "5ba15f0e-8695-42f8-92ed-19deed96b442"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.200858 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ba15f0e-8695-42f8-92ed-19deed96b442-kube-api-access-8fm4t" (OuterVolumeSpecName: "kube-api-access-8fm4t") pod "5ba15f0e-8695-42f8-92ed-19deed96b442" (UID: "5ba15f0e-8695-42f8-92ed-19deed96b442"). InnerVolumeSpecName "kube-api-access-8fm4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.227859 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ba15f0e-8695-42f8-92ed-19deed96b442" (UID: "5ba15f0e-8695-42f8-92ed-19deed96b442"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.295883 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.295923 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fm4t\" (UniqueName: \"kubernetes.io/projected/5ba15f0e-8695-42f8-92ed-19deed96b442-kube-api-access-8fm4t\") on node \"crc\" DevicePath \"\"" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.295935 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ba15f0e-8695-42f8-92ed-19deed96b442-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.411176 4721 generic.go:334] "Generic (PLEG): container finished" podID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerID="92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f" exitCode=0 Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.411228 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vg82x" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.411229 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerDied","Data":"92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f"} Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.411346 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vg82x" event={"ID":"5ba15f0e-8695-42f8-92ed-19deed96b442","Type":"ContainerDied","Data":"8634bb19c262879035b8e46644a94d743021dfb4b1bc8c827ae98a67ab133ec9"} Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.411375 4721 scope.go:117] "RemoveContainer" containerID="92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.432217 4721 scope.go:117] "RemoveContainer" containerID="fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.483588 4721 scope.go:117] "RemoveContainer" containerID="9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.497812 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vg82x"] Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.506607 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vg82x"] Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.524946 4721 scope.go:117] "RemoveContainer" containerID="92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f" Jan 30 22:34:22 crc kubenswrapper[4721]: E0130 22:34:22.525893 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f\": container with ID starting with 92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f not found: ID does not exist" containerID="92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.525945 4721 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f"} err="failed to get container status \"92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f\": rpc error: code = NotFound desc = could not find container \"92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f\": container with ID starting with 92bf3c42a2ea0113bf0236eba46ae2ae71c9ed6acbb1d08435be8554049d1d7f not found: ID does not exist" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.525995 4721 scope.go:117] "RemoveContainer" containerID="fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557" Jan 30 22:34:22 crc kubenswrapper[4721]: E0130 22:34:22.526832 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557\": container with ID starting with fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557 not found: ID does not exist" containerID="fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.526860 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557"} err="failed to get container status \"fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557\": rpc error: code = NotFound desc = could not find container \"fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557\": container with ID starting with fa8d472fceccbd0c18baef9fa81a6c6bece3b51f5d1f7d581e87a3fafeae4557 not found: ID does not exist" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.526874 4721 scope.go:117] "RemoveContainer" containerID="9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d" Jan 30 22:34:22 crc kubenswrapper[4721]: E0130 22:34:22.527193 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d\": container with ID starting with 9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d not found: ID does not exist" containerID="9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d" Jan 30 22:34:22 crc kubenswrapper[4721]: I0130 22:34:22.527241 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d"} err="failed to get container status \"9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d\": rpc error: code = NotFound desc = could not find container \"9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d\": container with ID starting with 9c3d689e54f924c4640b8f1ebc80100ac7a680753ab61b4386a62694c4cdfd0d not found: ID does not exist" Jan 30 22:34:24 crc kubenswrapper[4721]: I0130 22:34:24.109453 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" path="/var/lib/kubelet/pods/5ba15f0e-8695-42f8-92ed-19deed96b442/volumes" Jan 30 22:34:26 crc kubenswrapper[4721]: I0130 22:34:26.953557 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-m2qpz_36b59aa6-8bde-4935-82ce-04ef6d8ec10c/control-plane-machine-set-operator/0.log" Jan 30 22:34:27 crc 
kubenswrapper[4721]: I0130 22:34:27.247234 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rhwvl_834ba560-2a16-437b-8d57-20a0017ee78f/kube-rbac-proxy/0.log" Jan 30 22:34:27 crc kubenswrapper[4721]: I0130 22:34:27.312423 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rhwvl_834ba560-2a16-437b-8d57-20a0017ee78f/machine-api-operator/0.log" Jan 30 22:34:42 crc kubenswrapper[4721]: I0130 22:34:42.708827 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-dwdzb_d4f08327-9c54-4b81-a397-77de365f3c7d/cert-manager-controller/0.log" Jan 30 22:34:42 crc kubenswrapper[4721]: I0130 22:34:42.885971 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-kv2jr_02aa71d8-1558-4083-b360-d40f9bd180fb/cert-manager-cainjector/0.log" Jan 30 22:34:42 crc kubenswrapper[4721]: I0130 22:34:42.913424 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-7wqvq_7a28e689-3208-4314-a5d9-c06c110c2482/cert-manager-webhook/0.log" Jan 30 22:34:57 crc kubenswrapper[4721]: I0130 22:34:57.543893 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-4n7dn_13af9eb8-866f-4f4f-9698-e1208720edea/nmstate-console-plugin/0.log" Jan 30 22:34:57 crc kubenswrapper[4721]: I0130 22:34:57.769377 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-mqjg9_acdc2ab0-dd4b-4cb0-a325-ae7569073244/nmstate-handler/0.log" Jan 30 22:34:57 crc kubenswrapper[4721]: I0130 22:34:57.780628 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-cv4hc_fc4f4701-25d8-4af1-9128-d625c2448550/kube-rbac-proxy/0.log" Jan 30 22:34:57 crc kubenswrapper[4721]: I0130 22:34:57.981944 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-cv4hc_fc4f4701-25d8-4af1-9128-d625c2448550/nmstate-metrics/0.log" Jan 30 22:34:58 crc kubenswrapper[4721]: I0130 22:34:58.032072 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-swldn_9ff9ae3f-3898-4963-996f-5ee35048f5af/nmstate-operator/0.log" Jan 30 22:34:58 crc kubenswrapper[4721]: I0130 22:34:58.193280 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-t8rgz_b623908d-b2f5-49d0-9810-a2638fee1d6a/nmstate-webhook/0.log" Jan 30 22:35:12 crc kubenswrapper[4721]: I0130 22:35:12.492695 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/kube-rbac-proxy/0.log" Jan 30 22:35:12 crc kubenswrapper[4721]: I0130 22:35:12.555375 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/manager/0.log" Jan 30 22:35:27 crc kubenswrapper[4721]: I0130 22:35:27.374748 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-lxm2h_64ed6731-ff9c-4a61-b696-00dcac24cb8d/prometheus-operator/0.log" Jan 30 22:35:27 crc kubenswrapper[4721]: I0130 22:35:27.600588 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-25fb7_90e03b69-5f4f-469a-a8a0-82bc942a47e7/prometheus-operator-admission-webhook/0.log" Jan 30 22:35:27 crc kubenswrapper[4721]: I0130 22:35:27.669553 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-98r68_5c65d4e6-4f21-4298-a63e-7390c6588e3f/prometheus-operator-admission-webhook/0.log" Jan 30 22:35:28 crc kubenswrapper[4721]: I0130 22:35:28.225071 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rw7xg_6007c115-e448-4886-9aa2-14a72217c0bd/operator/0.log" Jan 30 22:35:28 crc kubenswrapper[4721]: I0130 22:35:28.278388 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hnrqb_e7ea1c63-c2af-4258-9864-8e09c708d507/perses-operator/0.log" Jan 30 22:35:45 crc kubenswrapper[4721]: I0130 22:35:45.666742 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-4xz99_852b81d6-0da5-4035-841b-2613bd3f2561/kube-rbac-proxy/0.log" Jan 30 22:35:45 crc kubenswrapper[4721]: I0130 22:35:45.762988 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-4xz99_852b81d6-0da5-4035-841b-2613bd3f2561/controller/0.log" Jan 30 22:35:45 crc kubenswrapper[4721]: I0130 22:35:45.901766 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.152872 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.205432 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.216192 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.237423 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.431339 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.431408 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.445826 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.454703 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.661764 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-frr-files/0.log" Jan 30 22:35:46 crc 
kubenswrapper[4721]: I0130 22:35:46.688862 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/controller/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.704866 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-reloader/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.710316 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/cp-metrics/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.899580 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/frr-metrics/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.906965 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/kube-rbac-proxy-frr/0.log" Jan 30 22:35:46 crc kubenswrapper[4721]: I0130 22:35:46.907480 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/kube-rbac-proxy/0.log" Jan 30 22:35:47 crc kubenswrapper[4721]: I0130 22:35:47.190662 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/reloader/0.log" Jan 30 22:35:47 crc kubenswrapper[4721]: I0130 22:35:47.253423 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-t5qjr_3aee18f8-337b-4dfc-9951-c44ea52f5193/frr-k8s-webhook-server/0.log" Jan 30 22:35:47 crc kubenswrapper[4721]: I0130 22:35:47.571265 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5785c9bddd-96xvw_58594e33-cffd-4e67-99a7-7f3fb6b0d6f0/manager/0.log" Jan 30 22:35:47 crc kubenswrapper[4721]: I0130 22:35:47.725327 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5bf68458df-dm6mf_dae4a1c8-c40a-4506-b9c8-b2146ef8c480/webhook-server/0.log" Jan 30 22:35:47 crc kubenswrapper[4721]: I0130 22:35:47.851317 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-flglj_d5447d1b-a776-4ddb-a90a-e926273205f3/kube-rbac-proxy/0.log" Jan 30 22:35:48 crc kubenswrapper[4721]: I0130 22:35:48.411426 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thrjz_501b07ab-d449-4910-a0f2-e37dcef83f0b/frr/0.log" Jan 30 22:35:48 crc kubenswrapper[4721]: I0130 22:35:48.474263 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-flglj_d5447d1b-a776-4ddb-a90a-e926273205f3/speaker/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.243352 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/util/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.480036 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/util/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.511414 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/pull/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.546849 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/pull/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.812391 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/pull/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.814934 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/extract/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.825227 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zhnm_b4eea3e4-dddd-4939-aa4b-6a1e4c0df5e6/util/0.log" Jan 30 22:36:03 crc kubenswrapper[4721]: I0130 22:36:03.995411 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/util/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.248860 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/util/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.249922 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/pull/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.266993 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/pull/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.429056 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/util/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.455729 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/extract/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.464029 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713f87fm_02297407-20a9-4d67-8952-9e0b267ab930/pull/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.616313 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-utilities/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.812916 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-utilities/0.log" Jan 30 22:36:04 crc 
kubenswrapper[4721]: I0130 22:36:04.816435 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-content/0.log" Jan 30 22:36:04 crc kubenswrapper[4721]: I0130 22:36:04.817846 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-content/0.log" Jan 30 22:36:05 crc kubenswrapper[4721]: I0130 22:36:05.072730 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-content/0.log" Jan 30 22:36:05 crc kubenswrapper[4721]: I0130 22:36:05.133916 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/extract-utilities/0.log" Jan 30 22:36:05 crc kubenswrapper[4721]: I0130 22:36:05.329998 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-utilities/0.log" Jan 30 22:36:05 crc kubenswrapper[4721]: I0130 22:36:05.732985 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pn4j5_bd89314e-6d14-49eb-9cf8-448f8fde27e1/registry-server/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.383716 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-utilities/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.435761 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-content/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.456609 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-content/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.669273 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-content/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.696078 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/extract-utilities/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.963522 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-jjn55_8da1faa6-dbea-4a4c-a83d-b6a51551ab85/marketplace-operator/0.log" Jan 30 22:36:06 crc kubenswrapper[4721]: I0130 22:36:06.968978 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-utilities/0.log" Jan 30 22:36:07 crc kubenswrapper[4721]: I0130 22:36:07.290477 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-utilities/0.log" Jan 30 22:36:07 crc kubenswrapper[4721]: I0130 22:36:07.390683 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rh5tn_36b5da67-7fd7-4ddb-9f14-c8de51a88e05/registry-server/0.log" Jan 30 22:36:07 crc 
kubenswrapper[4721]: I0130 22:36:07.402271 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-content/0.log" Jan 30 22:36:07 crc kubenswrapper[4721]: I0130 22:36:07.417991 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-content/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.208321 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-utilities/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.312425 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/extract-content/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.364789 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-utilities/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.463852 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zc72v_abb2e581-41fd-4409-893a-0f8a4b7ebd31/registry-server/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.598721 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-content/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.621917 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-utilities/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.651613 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-content/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.820601 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-utilities/0.log" Jan 30 22:36:08 crc kubenswrapper[4721]: I0130 22:36:08.899192 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/extract-content/0.log" Jan 30 22:36:09 crc kubenswrapper[4721]: I0130 22:36:09.484185 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xgdzf_7c95c9c5-93b7-4fe9-81fe-3a893a01fb29/registry-server/0.log" Jan 30 22:36:21 crc kubenswrapper[4721]: I0130 22:36:21.965724 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-lxm2h_64ed6731-ff9c-4a61-b696-00dcac24cb8d/prometheus-operator/0.log" Jan 30 22:36:21 crc kubenswrapper[4721]: I0130 22:36:21.977410 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-25fb7_90e03b69-5f4f-469a-a8a0-82bc942a47e7/prometheus-operator-admission-webhook/0.log" Jan 30 22:36:22 crc kubenswrapper[4721]: I0130 22:36:22.019069 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79f76f558b-98r68_5c65d4e6-4f21-4298-a63e-7390c6588e3f/prometheus-operator-admission-webhook/0.log" Jan 30 22:36:22 crc kubenswrapper[4721]: I0130 22:36:22.199971 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hnrqb_e7ea1c63-c2af-4258-9864-8e09c708d507/perses-operator/0.log" Jan 30 22:36:22 crc kubenswrapper[4721]: I0130 22:36:22.207213 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rw7xg_6007c115-e448-4886-9aa2-14a72217c0bd/operator/0.log" Jan 30 22:36:29 crc kubenswrapper[4721]: I0130 22:36:29.448118 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:36:29 crc kubenswrapper[4721]: I0130 22:36:29.448699 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:36:36 crc kubenswrapper[4721]: I0130 22:36:36.331208 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/kube-rbac-proxy/0.log" Jan 30 22:36:36 crc kubenswrapper[4721]: I0130 22:36:36.379433 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5d697845c-gth7c_c86bc11f-3071-4387-9368-da8a53cc69b8/manager/0.log" Jan 30 22:36:59 crc kubenswrapper[4721]: I0130 22:36:59.448821 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:36:59 crc kubenswrapper[4721]: I0130 22:36:59.449394 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:37:29 crc kubenswrapper[4721]: I0130 22:37:29.449130 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:37:29 crc kubenswrapper[4721]: I0130 22:37:29.449769 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:37:29 crc kubenswrapper[4721]: I0130 22:37:29.449832 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 22:37:29 crc kubenswrapper[4721]: I0130 22:37:29.450839 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"48ebdad53971c68c6e5ffb83001b2f424ee906a261bf938298acaf660ae7162f"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 22:37:29 crc kubenswrapper[4721]: I0130 22:37:29.450915 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://48ebdad53971c68c6e5ffb83001b2f424ee906a261bf938298acaf660ae7162f" gracePeriod=600 Jan 30 22:37:30 crc kubenswrapper[4721]: I0130 22:37:30.229995 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="48ebdad53971c68c6e5ffb83001b2f424ee906a261bf938298acaf660ae7162f" exitCode=0 Jan 30 22:37:30 crc kubenswrapper[4721]: I0130 22:37:30.230072 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"48ebdad53971c68c6e5ffb83001b2f424ee906a261bf938298acaf660ae7162f"} Jan 30 22:37:30 crc kubenswrapper[4721]: I0130 22:37:30.230416 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerStarted","Data":"19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60"} Jan 30 22:37:30 crc kubenswrapper[4721]: I0130 22:37:30.230448 4721 scope.go:117] "RemoveContainer" containerID="431c5dd43f5f7e09359055aa756aef1952f42f0aad3c80880a3f5a76214fd8c4" Jan 30 22:38:17 crc kubenswrapper[4721]: I0130 22:38:17.608858 4721 scope.go:117] "RemoveContainer" containerID="d655a09627d291b2cfb9d750d271e6507487e85f1fdf5d1b558a1f58d0d93c40" Jan 30 22:38:26 crc kubenswrapper[4721]: I0130 22:38:26.803566 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ec87141-36fc-4527-a7ad-91fead722da3" containerID="a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2" exitCode=0 Jan 30 22:38:26 crc kubenswrapper[4721]: I0130 22:38:26.803635 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2mv76/must-gather-8wbxc" event={"ID":"8ec87141-36fc-4527-a7ad-91fead722da3","Type":"ContainerDied","Data":"a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2"} Jan 30 22:38:26 crc kubenswrapper[4721]: I0130 22:38:26.804764 4721 scope.go:117] "RemoveContainer" containerID="a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2" Jan 30 22:38:27 crc kubenswrapper[4721]: I0130 22:38:27.649876 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2mv76_must-gather-8wbxc_8ec87141-36fc-4527-a7ad-91fead722da3/gather/0.log" Jan 30 22:38:39 crc kubenswrapper[4721]: I0130 22:38:39.733727 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2mv76/must-gather-8wbxc"] Jan 30 22:38:39 crc kubenswrapper[4721]: I0130 22:38:39.734683 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-2mv76/must-gather-8wbxc" 
podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="copy" containerID="cri-o://68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145" gracePeriod=2 Jan 30 22:38:39 crc kubenswrapper[4721]: I0130 22:38:39.744252 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2mv76/must-gather-8wbxc"] Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.847221 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2mv76_must-gather-8wbxc_8ec87141-36fc-4527-a7ad-91fead722da3/copy/0.log" Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.848214 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.937569 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ec87141-36fc-4527-a7ad-91fead722da3-must-gather-output\") pod \"8ec87141-36fc-4527-a7ad-91fead722da3\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.937944 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8svr\" (UniqueName: \"kubernetes.io/projected/8ec87141-36fc-4527-a7ad-91fead722da3-kube-api-access-n8svr\") pod \"8ec87141-36fc-4527-a7ad-91fead722da3\" (UID: \"8ec87141-36fc-4527-a7ad-91fead722da3\") " Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.942473 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2mv76_must-gather-8wbxc_8ec87141-36fc-4527-a7ad-91fead722da3/copy/0.log" Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.943032 4721 generic.go:334] "Generic (PLEG): container finished" podID="8ec87141-36fc-4527-a7ad-91fead722da3" containerID="68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145" exitCode=143 Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.943075 4721 scope.go:117] "RemoveContainer" containerID="68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145" Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.943161 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2mv76/must-gather-8wbxc" Jan 30 22:38:40 crc kubenswrapper[4721]: I0130 22:38:40.943336 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ec87141-36fc-4527-a7ad-91fead722da3-kube-api-access-n8svr" (OuterVolumeSpecName: "kube-api-access-n8svr") pod "8ec87141-36fc-4527-a7ad-91fead722da3" (UID: "8ec87141-36fc-4527-a7ad-91fead722da3"). InnerVolumeSpecName "kube-api-access-n8svr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.014587 4721 scope.go:117] "RemoveContainer" containerID="a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.043119 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8svr\" (UniqueName: \"kubernetes.io/projected/8ec87141-36fc-4527-a7ad-91fead722da3-kube-api-access-n8svr\") on node \"crc\" DevicePath \"\"" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.060499 4721 scope.go:117] "RemoveContainer" containerID="68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145" Jan 30 22:38:41 crc kubenswrapper[4721]: E0130 22:38:41.060951 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145\": container with ID starting with 68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145 not found: ID does not exist" containerID="68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.061004 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145"} err="failed to get container status \"68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145\": rpc error: code = NotFound desc = could not find container \"68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145\": container with ID starting with 68da2e9d23b5db721bfde71f71aa91787ad515d1dd63704bbfa56423556d9145 not found: ID does not exist" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.061037 4721 scope.go:117] "RemoveContainer" containerID="a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2" Jan 30 22:38:41 crc kubenswrapper[4721]: E0130 22:38:41.061483 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2\": container with ID starting with a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2 not found: ID does not exist" containerID="a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.061516 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2"} err="failed to get container status \"a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2\": rpc error: code = NotFound desc = could not find container \"a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2\": container with ID starting with a8124f213a913c3f1a4af471a279ed1566b80cf1f1239770eb7fb7ae8f13c7f2 not found: ID does not exist" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.137763 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ec87141-36fc-4527-a7ad-91fead722da3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "8ec87141-36fc-4527-a7ad-91fead722da3" (UID: "8ec87141-36fc-4527-a7ad-91fead722da3"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:38:41 crc kubenswrapper[4721]: I0130 22:38:41.145332 4721 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ec87141-36fc-4527-a7ad-91fead722da3-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 30 22:38:42 crc kubenswrapper[4721]: I0130 22:38:42.104869 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" path="/var/lib/kubelet/pods/8ec87141-36fc-4527-a7ad-91fead722da3/volumes" Jan 30 22:39:18 crc kubenswrapper[4721]: I0130 22:39:18.159251 4721 scope.go:117] "RemoveContainer" containerID="e79e3d9cef64153b34defe7fc077a468e4153cf46fe7608936e41ee1ce4e02c5" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.049757 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8bjvw"] Jan 30 22:39:22 crc kubenswrapper[4721]: E0130 22:39:22.050910 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="copy" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.050927 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="copy" Jan 30 22:39:22 crc kubenswrapper[4721]: E0130 22:39:22.050939 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="extract-utilities" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.050948 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="extract-utilities" Jan 30 22:39:22 crc kubenswrapper[4721]: E0130 22:39:22.050965 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="registry-server" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.050974 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="registry-server" Jan 30 22:39:22 crc kubenswrapper[4721]: E0130 22:39:22.050997 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="gather" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.051004 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="gather" Jan 30 22:39:22 crc kubenswrapper[4721]: E0130 22:39:22.051039 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="extract-content" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.051048 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="extract-content" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.051459 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="copy" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.051584 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ec87141-36fc-4527-a7ad-91fead722da3" containerName="gather" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.051595 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ba15f0e-8695-42f8-92ed-19deed96b442" containerName="registry-server" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.053471 4721 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.066513 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bjvw"] Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.098234 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7413d7d-a39c-4dbf-a57f-1776364cbff6-utilities\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.098454 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7413d7d-a39c-4dbf-a57f-1776364cbff6-catalog-content\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.098596 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwffc\" (UniqueName: \"kubernetes.io/projected/f7413d7d-a39c-4dbf-a57f-1776364cbff6-kube-api-access-vwffc\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.200776 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwffc\" (UniqueName: \"kubernetes.io/projected/f7413d7d-a39c-4dbf-a57f-1776364cbff6-kube-api-access-vwffc\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.200866 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7413d7d-a39c-4dbf-a57f-1776364cbff6-utilities\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.201018 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7413d7d-a39c-4dbf-a57f-1776364cbff6-catalog-content\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.202044 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7413d7d-a39c-4dbf-a57f-1776364cbff6-catalog-content\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.202134 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7413d7d-a39c-4dbf-a57f-1776364cbff6-utilities\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.232400 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwffc\" (UniqueName: \"kubernetes.io/projected/f7413d7d-a39c-4dbf-a57f-1776364cbff6-kube-api-access-vwffc\") pod \"certified-operators-8bjvw\" (UID: \"f7413d7d-a39c-4dbf-a57f-1776364cbff6\") " pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:22 crc kubenswrapper[4721]: I0130 22:39:22.377262 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:23 crc kubenswrapper[4721]: I0130 22:39:23.000427 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bjvw"] Jan 30 22:39:23 crc kubenswrapper[4721]: I0130 22:39:23.356598 4721 generic.go:334] "Generic (PLEG): container finished" podID="f7413d7d-a39c-4dbf-a57f-1776364cbff6" containerID="bfc54fb2394c226db14b6c4598b6e62aec10a418f48a2ed07b867d7a8e38df90" exitCode=0 Jan 30 22:39:23 crc kubenswrapper[4721]: I0130 22:39:23.356765 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bjvw" event={"ID":"f7413d7d-a39c-4dbf-a57f-1776364cbff6","Type":"ContainerDied","Data":"bfc54fb2394c226db14b6c4598b6e62aec10a418f48a2ed07b867d7a8e38df90"} Jan 30 22:39:23 crc kubenswrapper[4721]: I0130 22:39:23.356932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bjvw" event={"ID":"f7413d7d-a39c-4dbf-a57f-1776364cbff6","Type":"ContainerStarted","Data":"21e7cb45c7232d6d1568b43b147046ae3b04d89ede6ee879d8bcc25166871459"} Jan 30 22:39:23 crc kubenswrapper[4721]: I0130 22:39:23.358986 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 22:39:29 crc kubenswrapper[4721]: I0130 22:39:29.448831 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:39:29 crc kubenswrapper[4721]: I0130 22:39:29.449466 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:39:29 crc kubenswrapper[4721]: I0130 22:39:29.455412 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bjvw" event={"ID":"f7413d7d-a39c-4dbf-a57f-1776364cbff6","Type":"ContainerStarted","Data":"7474dde5b34b42a3be6b1dfe1993b57a889405d9505a9b0dada02ad9c0570129"} Jan 30 22:39:30 crc kubenswrapper[4721]: I0130 22:39:30.468492 4721 generic.go:334] "Generic (PLEG): container finished" podID="f7413d7d-a39c-4dbf-a57f-1776364cbff6" containerID="7474dde5b34b42a3be6b1dfe1993b57a889405d9505a9b0dada02ad9c0570129" exitCode=0 Jan 30 22:39:30 crc kubenswrapper[4721]: I0130 22:39:30.468619 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bjvw" event={"ID":"f7413d7d-a39c-4dbf-a57f-1776364cbff6","Type":"ContainerDied","Data":"7474dde5b34b42a3be6b1dfe1993b57a889405d9505a9b0dada02ad9c0570129"} Jan 30 22:39:31 crc kubenswrapper[4721]: I0130 22:39:31.478701 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-8bjvw" event={"ID":"f7413d7d-a39c-4dbf-a57f-1776364cbff6","Type":"ContainerStarted","Data":"a61519a528b1c98308295a7432634eb3c683455b5972282642f02302b75a9729"} Jan 30 22:39:31 crc kubenswrapper[4721]: I0130 22:39:31.501709 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8bjvw" podStartSLOduration=1.9657328349999998 podStartE2EDuration="9.501683338s" podCreationTimestamp="2026-01-30 22:39:22 +0000 UTC" firstStartedPulling="2026-01-30 22:39:23.358780854 +0000 UTC m=+4952.150682100" lastFinishedPulling="2026-01-30 22:39:30.894731357 +0000 UTC m=+4959.686632603" observedRunningTime="2026-01-30 22:39:31.495716631 +0000 UTC m=+4960.287617867" watchObservedRunningTime="2026-01-30 22:39:31.501683338 +0000 UTC m=+4960.293584594" Jan 30 22:39:32 crc kubenswrapper[4721]: I0130 22:39:32.377595 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:32 crc kubenswrapper[4721]: I0130 22:39:32.377965 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:33 crc kubenswrapper[4721]: I0130 22:39:33.430069 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-8bjvw" podUID="f7413d7d-a39c-4dbf-a57f-1776364cbff6" containerName="registry-server" probeResult="failure" output=< Jan 30 22:39:33 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Jan 30 22:39:33 crc kubenswrapper[4721]: > Jan 30 22:39:42 crc kubenswrapper[4721]: I0130 22:39:42.424992 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:42 crc kubenswrapper[4721]: I0130 22:39:42.475928 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8bjvw" Jan 30 22:39:42 crc kubenswrapper[4721]: I0130 22:39:42.658184 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bjvw"] Jan 30 22:39:42 crc kubenswrapper[4721]: I0130 22:39:42.835429 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pn4j5"] Jan 30 22:39:42 crc kubenswrapper[4721]: I0130 22:39:42.835701 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pn4j5" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="registry-server" containerID="cri-o://8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373" gracePeriod=2 Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.478240 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.581329 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fxgn\" (UniqueName: \"kubernetes.io/projected/bd89314e-6d14-49eb-9cf8-448f8fde27e1-kube-api-access-5fxgn\") pod \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.581371 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-catalog-content\") pod \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.581536 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-utilities\") pod \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\" (UID: \"bd89314e-6d14-49eb-9cf8-448f8fde27e1\") " Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.583934 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-utilities" (OuterVolumeSpecName: "utilities") pod "bd89314e-6d14-49eb-9cf8-448f8fde27e1" (UID: "bd89314e-6d14-49eb-9cf8-448f8fde27e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.592797 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd89314e-6d14-49eb-9cf8-448f8fde27e1-kube-api-access-5fxgn" (OuterVolumeSpecName: "kube-api-access-5fxgn") pod "bd89314e-6d14-49eb-9cf8-448f8fde27e1" (UID: "bd89314e-6d14-49eb-9cf8-448f8fde27e1"). InnerVolumeSpecName "kube-api-access-5fxgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.606288 4721 generic.go:334] "Generic (PLEG): container finished" podID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerID="8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373" exitCode=0 Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.607455 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pn4j5" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.608042 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pn4j5" event={"ID":"bd89314e-6d14-49eb-9cf8-448f8fde27e1","Type":"ContainerDied","Data":"8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373"} Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.608086 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pn4j5" event={"ID":"bd89314e-6d14-49eb-9cf8-448f8fde27e1","Type":"ContainerDied","Data":"d4a9b96a919a6a31b5e9dca196a60a3ef4add047530a8bbd37ea8c199ac9f144"} Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.608107 4721 scope.go:117] "RemoveContainer" containerID="8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.660585 4721 scope.go:117] "RemoveContainer" containerID="c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.685448 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.685490 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fxgn\" (UniqueName: \"kubernetes.io/projected/bd89314e-6d14-49eb-9cf8-448f8fde27e1-kube-api-access-5fxgn\") on node \"crc\" DevicePath \"\"" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.708064 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd89314e-6d14-49eb-9cf8-448f8fde27e1" (UID: "bd89314e-6d14-49eb-9cf8-448f8fde27e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.712900 4721 scope.go:117] "RemoveContainer" containerID="796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.742704 4721 scope.go:117] "RemoveContainer" containerID="8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373" Jan 30 22:39:43 crc kubenswrapper[4721]: E0130 22:39:43.743196 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373\": container with ID starting with 8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373 not found: ID does not exist" containerID="8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.743241 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373"} err="failed to get container status \"8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373\": rpc error: code = NotFound desc = could not find container \"8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373\": container with ID starting with 8bcdfc3f9a1c60dd86a4043c2681f3c5dd8eb1ca2f03a1e949a35a248c338373 not found: ID does not exist" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.743268 4721 scope.go:117] "RemoveContainer" containerID="c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f" Jan 30 22:39:43 crc kubenswrapper[4721]: E0130 22:39:43.743610 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f\": container with ID starting with c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f not found: ID does not exist" containerID="c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.743649 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f"} err="failed to get container status \"c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f\": rpc error: code = NotFound desc = could not find container \"c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f\": container with ID starting with c896fe7e2ba78890e5016be122e02fdaa6a12683415a20bc46a0d6d050e4209f not found: ID does not exist" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.743669 4721 scope.go:117] "RemoveContainer" containerID="796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847" Jan 30 22:39:43 crc kubenswrapper[4721]: E0130 22:39:43.743983 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847\": container with ID starting with 796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847 not found: ID does not exist" containerID="796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.744024 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847"} err="failed to get container status \"796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847\": rpc error: code = NotFound desc = could not find container \"796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847\": container with ID starting with 796f2fbf10bd836cdbcd811843be7e2c550135c562139206cf59a235ea6a0847 not found: ID does not exist" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.787750 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd89314e-6d14-49eb-9cf8-448f8fde27e1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.941959 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pn4j5"] Jan 30 22:39:43 crc kubenswrapper[4721]: I0130 22:39:43.954555 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pn4j5"] Jan 30 22:39:44 crc kubenswrapper[4721]: I0130 22:39:44.116191 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" path="/var/lib/kubelet/pods/bd89314e-6d14-49eb-9cf8-448f8fde27e1/volumes" Jan 30 22:39:59 crc kubenswrapper[4721]: I0130 22:39:59.449172 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:39:59 crc kubenswrapper[4721]: I0130 22:39:59.449876 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.114329 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pgnks"] Jan 30 22:40:12 crc kubenswrapper[4721]: E0130 22:40:12.115508 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="extract-utilities" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.115529 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="extract-utilities" Jan 30 22:40:12 crc kubenswrapper[4721]: E0130 22:40:12.115540 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="extract-content" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.115550 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="extract-content" Jan 30 22:40:12 crc kubenswrapper[4721]: E0130 22:40:12.115568 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="registry-server" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.115578 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="registry-server" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.115873 4721 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="bd89314e-6d14-49eb-9cf8-448f8fde27e1" containerName="registry-server" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.117758 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.130365 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pgnks"] Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.280501 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-utilities\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.280669 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-catalog-content\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.280757 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nddpd\" (UniqueName: \"kubernetes.io/projected/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-kube-api-access-nddpd\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.383676 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-utilities\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.383823 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-catalog-content\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.383903 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nddpd\" (UniqueName: \"kubernetes.io/projected/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-kube-api-access-nddpd\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.384124 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-utilities\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.384369 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-catalog-content\") pod 
\"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.410578 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nddpd\" (UniqueName: \"kubernetes.io/projected/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-kube-api-access-nddpd\") pod \"community-operators-pgnks\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.440690 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:12 crc kubenswrapper[4721]: I0130 22:40:12.972698 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pgnks"] Jan 30 22:40:13 crc kubenswrapper[4721]: I0130 22:40:13.959935 4721 generic.go:334] "Generic (PLEG): container finished" podID="9872e4d2-a63d-4c3b-a866-5d6df6c581b3" containerID="52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9" exitCode=0 Jan 30 22:40:13 crc kubenswrapper[4721]: I0130 22:40:13.960043 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerDied","Data":"52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9"} Jan 30 22:40:13 crc kubenswrapper[4721]: I0130 22:40:13.960249 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerStarted","Data":"cfb8f91874824d6256cd60b1fca3e2d30cc4de5fb35faa4a12bf8ba7125c7819"} Jan 30 22:40:14 crc kubenswrapper[4721]: I0130 22:40:14.973104 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerStarted","Data":"608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581"} Jan 30 22:40:16 crc kubenswrapper[4721]: I0130 22:40:16.991743 4721 generic.go:334] "Generic (PLEG): container finished" podID="9872e4d2-a63d-4c3b-a866-5d6df6c581b3" containerID="608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581" exitCode=0 Jan 30 22:40:16 crc kubenswrapper[4721]: I0130 22:40:16.991825 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerDied","Data":"608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581"} Jan 30 22:40:18 crc kubenswrapper[4721]: I0130 22:40:18.007152 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerStarted","Data":"918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3"} Jan 30 22:40:18 crc kubenswrapper[4721]: I0130 22:40:18.051786 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pgnks" podStartSLOduration=2.513423884 podStartE2EDuration="6.051765341s" podCreationTimestamp="2026-01-30 22:40:12 +0000 UTC" firstStartedPulling="2026-01-30 22:40:13.96538941 +0000 UTC m=+5002.757290656" lastFinishedPulling="2026-01-30 22:40:17.503730867 +0000 UTC m=+5006.295632113" observedRunningTime="2026-01-30 22:40:18.045345209 +0000 
UTC m=+5006.837246475" watchObservedRunningTime="2026-01-30 22:40:18.051765341 +0000 UTC m=+5006.843666587" Jan 30 22:40:22 crc kubenswrapper[4721]: I0130 22:40:22.441182 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:22 crc kubenswrapper[4721]: I0130 22:40:22.441750 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:22 crc kubenswrapper[4721]: I0130 22:40:22.490428 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:23 crc kubenswrapper[4721]: I0130 22:40:23.133053 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:23 crc kubenswrapper[4721]: I0130 22:40:23.183471 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pgnks"] Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.074396 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pgnks" podUID="9872e4d2-a63d-4c3b-a866-5d6df6c581b3" containerName="registry-server" containerID="cri-o://918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3" gracePeriod=2 Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.748121 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.890764 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nddpd\" (UniqueName: \"kubernetes.io/projected/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-kube-api-access-nddpd\") pod \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.890845 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-utilities\") pod \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.891036 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-catalog-content\") pod \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\" (UID: \"9872e4d2-a63d-4c3b-a866-5d6df6c581b3\") " Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.892384 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-utilities" (OuterVolumeSpecName: "utilities") pod "9872e4d2-a63d-4c3b-a866-5d6df6c581b3" (UID: "9872e4d2-a63d-4c3b-a866-5d6df6c581b3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.900504 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-kube-api-access-nddpd" (OuterVolumeSpecName: "kube-api-access-nddpd") pod "9872e4d2-a63d-4c3b-a866-5d6df6c581b3" (UID: "9872e4d2-a63d-4c3b-a866-5d6df6c581b3"). InnerVolumeSpecName "kube-api-access-nddpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.995988 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nddpd\" (UniqueName: \"kubernetes.io/projected/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-kube-api-access-nddpd\") on node \"crc\" DevicePath \"\"" Jan 30 22:40:25 crc kubenswrapper[4721]: I0130 22:40:25.996540 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.030306 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9872e4d2-a63d-4c3b-a866-5d6df6c581b3" (UID: "9872e4d2-a63d-4c3b-a866-5d6df6c581b3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.086098 4721 generic.go:334] "Generic (PLEG): container finished" podID="9872e4d2-a63d-4c3b-a866-5d6df6c581b3" containerID="918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3" exitCode=0 Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.086164 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgnks" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.086165 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerDied","Data":"918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3"} Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.087214 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgnks" event={"ID":"9872e4d2-a63d-4c3b-a866-5d6df6c581b3","Type":"ContainerDied","Data":"cfb8f91874824d6256cd60b1fca3e2d30cc4de5fb35faa4a12bf8ba7125c7819"} Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.087239 4721 scope.go:117] "RemoveContainer" containerID="918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.101443 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9872e4d2-a63d-4c3b-a866-5d6df6c581b3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.112949 4721 scope.go:117] "RemoveContainer" containerID="608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.132740 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pgnks"] Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.154611 4721 scope.go:117] "RemoveContainer" containerID="52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.161569 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pgnks"] Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.179871 4721 scope.go:117] "RemoveContainer" containerID="918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3" Jan 30 22:40:26 crc kubenswrapper[4721]: E0130 22:40:26.180306 4721 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3\": container with ID starting with 918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3 not found: ID does not exist" containerID="918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.180348 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3"} err="failed to get container status \"918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3\": rpc error: code = NotFound desc = could not find container \"918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3\": container with ID starting with 918d8594228cf811dec3cabbed06933823d00fb8e3fbd8c79e947c5b657d93f3 not found: ID does not exist" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.180374 4721 scope.go:117] "RemoveContainer" containerID="608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581" Jan 30 22:40:26 crc kubenswrapper[4721]: E0130 22:40:26.180779 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581\": container with ID starting with 608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581 not found: ID does not exist" containerID="608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.180922 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581"} err="failed to get container status \"608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581\": rpc error: code = NotFound desc = could not find container \"608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581\": container with ID starting with 608e737c44786bbe748ac3d0bb6dd3c17a173418bd556a20f5068bd58e0ec581 not found: ID does not exist" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.181002 4721 scope.go:117] "RemoveContainer" containerID="52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9" Jan 30 22:40:26 crc kubenswrapper[4721]: E0130 22:40:26.181273 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9\": container with ID starting with 52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9 not found: ID does not exist" containerID="52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9" Jan 30 22:40:26 crc kubenswrapper[4721]: I0130 22:40:26.181401 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9"} err="failed to get container status \"52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9\": rpc error: code = NotFound desc = could not find container \"52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9\": container with ID starting with 52ad4324f330975da5af671995d4a954a16e4ca55745cfc13b2cec93094e42b9 not found: ID does not exist" Jan 30 22:40:28 crc kubenswrapper[4721]: I0130 22:40:28.107111 4721 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="9872e4d2-a63d-4c3b-a866-5d6df6c581b3" path="/var/lib/kubelet/pods/9872e4d2-a63d-4c3b-a866-5d6df6c581b3/volumes" Jan 30 22:40:29 crc kubenswrapper[4721]: I0130 22:40:29.448448 4721 patch_prober.go:28] interesting pod/machine-config-daemon-p24tc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 22:40:29 crc kubenswrapper[4721]: I0130 22:40:29.448517 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 22:40:29 crc kubenswrapper[4721]: I0130 22:40:29.448564 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" Jan 30 22:40:29 crc kubenswrapper[4721]: I0130 22:40:29.449361 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60"} pod="openshift-machine-config-operator/machine-config-daemon-p24tc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 22:40:29 crc kubenswrapper[4721]: I0130 22:40:29.449416 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" containerName="machine-config-daemon" containerID="cri-o://19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" gracePeriod=600 Jan 30 22:40:29 crc kubenswrapper[4721]: E0130 22:40:29.577209 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:40:30 crc kubenswrapper[4721]: I0130 22:40:30.124961 4721 generic.go:334] "Generic (PLEG): container finished" podID="83a76f34-15d7-45c2-874e-d44709adbd11" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" exitCode=0 Jan 30 22:40:30 crc kubenswrapper[4721]: I0130 22:40:30.125043 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" event={"ID":"83a76f34-15d7-45c2-874e-d44709adbd11","Type":"ContainerDied","Data":"19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60"} Jan 30 22:40:30 crc kubenswrapper[4721]: I0130 22:40:30.125133 4721 scope.go:117] "RemoveContainer" containerID="48ebdad53971c68c6e5ffb83001b2f424ee906a261bf938298acaf660ae7162f" Jan 30 22:40:30 crc kubenswrapper[4721]: I0130 22:40:30.126492 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:40:30 crc kubenswrapper[4721]: E0130 22:40:30.126933 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:40:43 crc kubenswrapper[4721]: I0130 22:40:43.093319 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:40:43 crc kubenswrapper[4721]: E0130 22:40:43.094217 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:40:54 crc kubenswrapper[4721]: I0130 22:40:54.092753 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:40:54 crc kubenswrapper[4721]: E0130 22:40:54.093786 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:41:06 crc kubenswrapper[4721]: I0130 22:41:06.092151 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:41:06 crc kubenswrapper[4721]: E0130 22:41:06.092864 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:41:17 crc kubenswrapper[4721]: I0130 22:41:17.091938 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:41:17 crc kubenswrapper[4721]: E0130 22:41:17.092983 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:41:29 crc kubenswrapper[4721]: I0130 22:41:29.092574 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:41:29 crc kubenswrapper[4721]: E0130 22:41:29.093500 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:41:41 crc kubenswrapper[4721]: I0130 22:41:41.092797 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:41:41 crc kubenswrapper[4721]: E0130 22:41:41.093579 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:41:54 crc kubenswrapper[4721]: I0130 22:41:54.091912 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:41:54 crc kubenswrapper[4721]: E0130 22:41:54.092745 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:42:05 crc kubenswrapper[4721]: I0130 22:42:05.091900 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:42:05 crc kubenswrapper[4721]: E0130 22:42:05.092898 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" Jan 30 22:42:16 crc kubenswrapper[4721]: I0130 22:42:16.092262 4721 scope.go:117] "RemoveContainer" containerID="19e9dae6c728ac08662a516eb49038be83c7734ef0ec5ba0cb5542050a6b8a60" Jan 30 22:42:16 crc kubenswrapper[4721]: E0130 22:42:16.093150 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p24tc_openshift-machine-config-operator(83a76f34-15d7-45c2-874e-d44709adbd11)\"" pod="openshift-machine-config-operator/machine-config-daemon-p24tc" podUID="83a76f34-15d7-45c2-874e-d44709adbd11" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515137231736024455 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015137231737017373 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015137217367016521 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015137217367015471 5ustar corecore
Wm$]O;R`>~]ˮ=?SL!4nYby؛wecu/շG( u}W+ǻe8֯+?o]]/HϖHS\[X<,g|SN S[4)zчt@(qGGOpGl>_du6]gRYxT[=h|732: }NG_."F)ISoƇviU{/p])sJP>:W0&sO}2IUчJ͸Ew0bvfg`2mj.?n6w5^ְ >EȜi4y\m]/yxJ}jwS?74:Ioڜw4wxj?JwefƸa=E8%O4Zϖkqf*dbWKdx,|M g9ܑ'Y߲TȜ ~a2} }i_֧"Vc;P*ͤE#. `_5Âč7[tT0Z` ot>~gB=-ECOη)vl/޸[J=;舴8,ZLcoepH|4i[9Zl>H9|͉MpC曬>}CΉgptmi#3C4')[z-hx ח~kPv{oo?PCiԭҪF[j_ޣf%r:\MPI.åb/-o ڊ 4EnQ'ynwu(<]М QBϒK YcUov°.}oVY7׻VwzU WxWQIf o@G;%YK CuXZ]ʪJ&t~)J+oJ[:視a =6ly@m]FZMu6_iwN|wHnwNhw.㷸SA[4KMou>vvE: ̄'XZX` A4&8>F9Zκ6Fk/gì2ZV ~`+`r~JNWX{+'@QtR'B*bڈy@Á6!R80-{$)+%5.ZwmexKmZAuza&4m‚W/ty v>!mcP0/&DuwNіo>c{]7~w#:WBB:4D(im|@ID|Yx(k,5v>ڄ;n\yZ`y>T# g<젼y~jQ*JFԖ[ew/_ mLs|ыB/>Phcy 7΃4e >ze\'eT][1/2fsRuj:#gwk8?<WG).~ۣR >UqE3?әNeb.2%@{3lzdˎͷo?ƤanT3moFo1x=W2{6}zY Q J0gptt՞בVP -DG%xF tbS^pxyÜR|4lc:^:y̴C9eZF\]6Q'۾9 ^i}[ᣄ@.ṔΔz,nԥ]UOS b~ips/be%s{OgܰJ2J۷o'a;>s_t"~_?aw)_N߱VmӶ_LL=9|Vf]*ux~{wM?6WvcN9ہgnB5d t t`k%_/WMwSJd %_$/.Wo -(t A&_hYcK#LjE<|zYa;!4U`:IUĴRW(X HԾTZ]p5A\MF"ɵ6\I'o]Ji &+%9YW$B6R1rHW3Vp5A\i))v% \Ų6u\J]p5E\!{FBc]\sǮH%gpiɺ"9$d^+TkX"XWSĕU\hPT<\\rɇQbe#qtYrJ1uErΕV&+Rix+ЫS Jw{,)3ÍRz!m&X0L.#:L`zJ٩V{j/{.~ث[?oůr)aB- p:*IN{ۻpn~^u( N^t^^vbvM։0dd4<`P{|껾-'6{n/ίUZ_Z?#9q-hDuVxqy᪼Όpw_Żu:Nh62M_0Miv?OwwQL+( ]"&l&Bɂ " 0N+TkKWҺ :'$+L.B% J^bWSĕi,W(2]\uEj +RZJ8z;m@5SO1n[cV `0vdrZ;a*S+!`ZL;QhW(XnԡpErA+TR:LWĕ&Pw`gB,6\ZuE** ,wы\;v0Φ+T)R[pu\߈+lY6"cҏ]JQbWSĕvR;PQ:\\MԂNWu5I\G3 6لIU U+) 5anއbN@p`Ek 7.^ƕ~|釓t-o?}xGm xR~r?|$^}._A=DLc-~$#.G_FvᲝ]uV>DmբG w''rO-r /9%qJkQAn=zH_/-*4vhϷ<:j.h]ߜG K4csy^OxJ  e\iMA#+6}ͧ=ްAء_9Ŷc32 RgZ c&Z@C h`H3 v2\vyHTjUp5A\I!_=#蠸" l.B1T ( YF"u<\Z>H%wWĕԨ<#\ig[ 2\Z3H*Mq+#a99$xN: P-eJY) ?nl奸Bޝ". :STjJ)G[uEry6vRT"j99(|++ Z|T)bp;=̘bUz)]bTSf`/ct 8o͕ߪ$X˱arȘF2LK< 4L;\+lFBlpErWVqE*\MWB.#\`Y6"cw{&+T\MW0*#\`+D6"+RkwIej +eLlpEr!\Zt"Į+MSrP.\\3HjM"-: q)&8ؕi+R;Va*]q+tVr 棗&w:_Z:Peru + KW$Xj'Z+Rkl"$XH,0&P;ɅlARkźpewzKƓv)}҉cZKv8o˜ "ѷF?L-#:LK̪0m z(9&NEMGa'e3r'ewlUҟl<*JRlU2h87̨=ؠQv~|H dB\znKp(!GP.6ɵ.բmM*7Ş-,(2 6:\\PԎfJڃJ:&eN" ;v Aj3Tr( 23PɅlZ"N+TkyHFW(X|A;vAj7؉Tdpr  W(f+Rlpj%+TYr';ho,]gE[XE8K==޵,_!Ŵo r2<$/}ˢ"Ҟqiۺx-ZeHnVx1hǮ' m"mlobAC@Q#7;{O(?zUҙ:~!ۅ>+Jڤ|> \9<((z=].rY]"Xzgsls8vhCfm+]ry7*e?Yna>IK|.Yoizy;Kd@܋h-BJmzxƕxm"/@$P~ybzaZ&~nQ'an>?yj~JybzEB@r9ߤwac%u[\,qǿ_&mI-i=d7rdL.qq(GB"W(C!;|knćGGh/ۋ5V_kka`V~=QQ*Rw5r ={frL LvRR0٨S1X,ם?)3fTR1F)K5XWեr?Ql`'nAqVPk#:5`N]e,Z0Ę=c ַ͉ZCB[X%kbDbn1}'+S4)|n-6 67TSԻfmV0Wmd&CI9FR=F$B Sm7 TgXƮ#D34fE̍E^3.KNI9_}k"Y~whubT=Ĭ\ca;a&RA4P:iʹc0 !_riXhd5|/a*>#EG(pͬ+Ј*Iu|6g >BXȃcFgdɺ3o4>5ys*U˝ֽS $XmM ;rҊJ+ \7})9Dk(5)hK 9¯QZkdIX^TBZZrPR<@=%#Bb !cMI\=D@$X92bJ1U)PV]|G=uk,)0f;DrNU.YguQ=5B }m.5SGݎ`ƤUBi`ES }FmQxW 5h NAk!O4qwuh;gڴV=k(QN[PyU벫c*f$Ly6T:C8քFnue+y ,q+Yge0lh ճk.keB`t /$Xe=Ҽ1P k9FUh]%`}5 ɷQzҥLcs'[FpՈTb0VjXq21Mh(pu 8) d> Aq)JMdLB5Xvl:L% Wћ+dW4;.SefH57]\Q?,14u DcHs5@P6{iLsw̶hU(·P[s : 9 q& Bl nKB`@Y\OƅZuT&;9F*AC5lD] QQ&A(xufYywDyL_ 5P2&@ݚ2&@vTmXtu~V% 9ՠQjK-"VwV5]lv}:ou3Iv\t0f&9hLi#ll}-൓*٢b1j0Yk t/ y"<4f&fcdgrr0հMʌؓ8 IE!9)j~@'ڪR7&%\I"MA;hF L )cYQ-HOO( 4u顾֢뛷f6vł*'~V"kc蓛knWP %֓DyŲ*aSPh=ڢ#آH,ܽCbRuN` LB)Ѧi.Q`2z hNm;c 27+VjHk֪U3| ̤yJ!&ci@Zq!; tmr4=B])4>xZz% kN (|5jUNàe6M[̀z=pes!4CSQ3&{|tc;7{j̚h=MGPrm⊕d 7 NCiކ`ݬ1jŀrD$b9\PY59Zp: 1ib&l$Tip/hm]~B:D;y0B.8 R GV7VVް:6P0jɭ2vKtTVUV>2_Csٳ/Ceۘ??:|m[Jys ro.l嗫իnHO_tXv9ݼ{rX,x˼b})iDN"D2l~"9[_}򴿻-#'߭'Wn~/mo)Ƹf-9cƣvR_`ﻻ*h\Ć8\s4N{@yů$N`8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@?YN HuDN (|'Ѓty'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q:|DM9&'G:'Xq WJ@ wH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8~8'P">A w'Э;vpuNۡu8n2r[88n?ڸ $Nowt-8@.p 'L8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'}u@nZGV1vus~C`ܥ_՛W@4>&[<%hK@7.i^K3*ڕyƺ_ 1{˕j$*L,=D?+[<_/.WݡPjcJmxtd&w7ApD4 -o.\RXhz5@yR 
M?8&`oՀXju^U^!ҕ`$>"9\OXjet5P(tMGVGCW= w(C+*c6eRGCWBWmp\OЕc] ುjGsjl(BW<;{>sWc6}+ J ]=@ tL8X{4t59>{ e$PChucRWwՀ@@d{CWnؠXmJh}B QQGdیh[vߝo W_-r.4}KlX;-}$eٖvt^m ^QoL(űHbu@š?=|׫tnΖf^ʽx5 W>;sVOӋzsZO}ʫ%^5u{ f(,miuy8ϟ몗6nO>8~w^7NjӋ ;'{O4gLR~x\;uوls\v=Ԭ޳q$W}HxqX ,zq+*e*aш", 3yD t@t [u[Ky`UzqyUwť@x."LJX ֋P\ G$BJJu,*a塋+R>kWR1G#h\퀭 U’Q\)%4>\8c0+w=|W{jRe#ҿq.A2>i$IթDJ7_ݻD&˧chFiHpbQ l ; ]Z._QW\w&gj\HĊNJZ c? NjY ikV둼][~(-6 /3zdtlJa^iGjd>9HysQ b| dZiaҠ;\p/?@,j4Z.B+NMF8mFB쳏a-`\J6OX,[6,r!໿^Ǡmݻca<8no:{[*%\2$U:k?$BKNͤE5x pL9¦ɼ 2==Mh\ M'@&y.]7a`9G+xT.bvU͇,ᮒOfKFU ~¬߿(}\OD@a-!tDARqRTMO֣vɉ/CZKHͷW~^kYjYɮWn)]hvdJW5gҴ$a4*ոxޅ:%gKM"%+g -NzܻаtQzVWG~s֩yυ㴯W~dΆīL(=Dl,JBE' {AbXD*f%|Uz'C0!B*Caj3jX$"﵌FZ5OHKFѹX~ڠP][uop'dp̤?V=SxwГ颀 qI{;#}M˧&)BA'|h.&o-Cs]?rmln_ ad^Q`-5r;=&-껛a|dsO~_a =9n]g H~=t;,WlnM-f3ҭi֩m 5D 0nUZ,M|6/3nJFݟ[;J:$Km3JqJ͍]9ܘdvrz%ᦄ-5RDr̽w`K*1ci㭯{۽v`RlH cX`9!4 A{đ[ #z9F$㑅H>HԔ1тFQ4`pHv m5.&| ;hprLg .&ðݴu+;E9V9[tپ4 ㈃1DQ+acFIŷV 1);y-Xhm etۨQ1,%! j;1wV8qw[,6EiJn+z,KM({BO@Wңi~$b}8lOw{E $Ties9NxnŞ(\O8Fޗ֠{7 ,JSi\r|šj 톰jqmwM? b&`ڡy{߽[}9SmH,vQ(рP~C8y g4cb< KHD !BH K@cfI1pJ !G&4d6v*d =>X2E4^JZp 8|gh6t,:C~0`liK[z=M]99OC+d?t3F)nzI7g| ,xpE a" pQ &$QD <!c )@#pFLf ߋ<_1X,hH 0t@ c"" Rðľ^fo&)c7}x8yW ZAxΧOZ+W_ `= ]V}|W*\lr5m]qv9 ~C1YOq;bUBn*MfdƮWeUP eҥV>6 m0;$rCu.lU8mT,DA/b=:~X3ߌ]0g MY:kSX;2F O·hF4w9߄i+\eoft SX"TNݑ,^[U)Et*ZpbwOߖ [N\jثq:T-dƃ_NFCwQwڪDg Y{Q,OÇK_%p6 29PV~$Ч]H5M;? U֞#D͂VsfKE;v4k܇|c9dK.r2ԏ&ZbsMK%"qƁ^g 4 Nt▟X:z A W xlW-ʃ:zIuP ! y<1aK&::u[^7B/Q<, gSFhcqvvd;]N wC(O(ӡ^x G2RLĶ2e"wMBt_꽩S. õ"t3ؚjv $(µS݂1ȳ_)tΎR#Mj#We-­&C'V~^Do]ޭsGl钔n~aa[V +8t.PnD[a8޴$zT;0m(4PL2B{xbxxB^Eo AI^G*# FǤ"9̰4N+!Hνx= =P~x̺SF-%R" "f⤍NjIA+knI)E\> uS8e6Sؒ$N w_YIIaކaj8[ʳVRyXdE_6 1HXSOԤ}{ hrkh(WOc$^[!v5N&0C3nvz )9l*̘TGԪwLq[e&O}\W2u?_~&P -#ignw *Mx |Q"2y5g4N OEZp:FYynRqR,L<'&tf_%"EoKVDL>dao%nk&W$RcVx%u K yZ<@c17i*λތ>L,QQf ywB4磢˖hF)lJ'Bz'f ,mҲSPҌތk 6dI Q HxkN.5Azv!(RC1uIeUVy_P__P镡[ݟ}2o- w}"ʤɛOu@Oi>̫=|G{X{44ubx}P2OV[$k&uqʶƔD}y̓ypv<~F<~Iٱ2ehʡڠgCkMlC0r =[dl2O6RMeBN!0r&y,K-I0sn'dz[|ofTl6xm蠰κZ'L,Ayj`%,ZqoBS)rKb #Z7*XRl+A%b(Rc.cw5kUXՈ1eM2l4%)bkQlR-Dc- G IHʽj{*U˾z!y1*ϙ5T5Ɓ*y8ΰw͆]~^t0 Z{B5v7Oo4`Ѫ3MeKV2Ekyޔf M.N@SC);Lƅ܇Cطk޿k˜SzR/k>5b{O,jWtk- -s }]IW2/)__/ͺ/XOU/_ꏷթOy/¢~x_b;=~o?M1CΓoNۈGA|U4(u)(S"u.(v^6<)|ho{1.otD9ϧF7ͳ)FKYn Üjt+=.e5^`Y 46<̽;>zz2ա;e cxy2M&&9^},~vR$MFɂi(+%L95* }3[j0@,✢@i3|AwC+zN0^:*a ix_#f/"cO1mD3h퇏+=|}lQ|LݽNLλlP5Ddm=3o g' }iB_C䠠*ƖKSaĔchra hcQgUj%0"p%9dI sh`[iI D!! F̹cICZ[uŏuئ{;dH\MnD_NK"$lmR7:P EX3A?)4շhT Nz2KOyk9;j}sY(VX9;Mqx8٫/UTrGӓOGvZ}7ovQzeehTC \tFT ,JկAu ݕKQ>s H52R2XSs qVkNB- x'M8̜;TUa#cXBZb ~xGMyןL|߾aջ[{r(6m]!o]B}trkY1ޅ5[6=)hlJhMK!Kj]@.)3:8zc9GttX.Q̹Dac.Q;.Q{A7r6a)@z Pz JdTrҸMQ\L(<)*BRlkXSɄc&IyuX%5 ̹=nHy9gG}#"D%"|`RoN Ţxtb8ڋ[:PҖ SVT$rz` ܸzg;8JQቲn[ʤu{GFsoqq=9ל:="ڱqaK\V{EP9"*-Աr5Gʌ i^J-dRsJ\CŹacx=^ ]\]i}Z?l[W6fVFQ;AHк 6ꫥ;l8d1(frДqbԠeg*ShvI'~FI'~o=~D q+_1EZ":kц!') F^)SQ3xsȄ2)tj.$DZ[9՘F f1@H%',()~W3椃q-^3ܛ >@ :NCtKtĠ&,;sg=^zgB%'%ۙZ O9lz="s5X& @}NH`skl,GgPC$Ja;φsG?9Y_:zs@ÏӋva^xvȈu*RVRFK2֚Pѿ.Fx[`墨+a3%in`K:pk jI OpyDlmnA4.i&/ DsiT6`rǺ~<Ͷ1׶Ƭ2ϻeFVlsPqփmi#}N83VHˁmhmVDԶ'u-F&Gi`;ߝʏ(p4o7C:dJ]f9e,{eqqqf_yDc2y⼛agd4Sy{g=T^Eނ)"*n9CTDVE抹uib:M~-lfAz *VP6Q~>̜;=I'45Ӹrwy]uuoKJ7?O/[xpM@\srJlճ ٻFr*@M+:+\a0=LwnEwDc˲*_J_fR6!{Yo*\\UAEB>5\2j w[a%ޠٗE+Bu?27”9~ک[zyvЪKnӘeyu\2UZLl~z,VoH#N`!#B ;eeۑge)+{>!Ԥj2sA`˾TPA_kX5Dvqꛙ9'BkbRTᩬ<ȝbqQb:O,3`%ECgm9GY_ӛee`ǔ?1ڲ1 PAÌDuPdسm:#xI9ڥ6F̠ukLA_&Dˮ. 
j!Uz궜#e GSb5Q&#FҎ" % NE \>C{6˷$T^)Zboe)C1xNmշ͈ /EE+i.G TqdZ ư:ϓQZ?DHHR>+k]Q%~,/'rnkcE~ֵ_xC0}j;{6$ QL-(PLr> Ŵ)m'/H1ⶔ~z_L.Z i ሃ88P7MLNg*'p1)_C'?|eeFBˆxS {-՘ѣs!xjʑqT٠\JJxP@ڤcdM5GnZoKYo d 9*m )m"t(Vj\"V㤼]wl +ou5!oG;7d^D=vڎy jhzfչj2X˕;u~`0u0Yѐ UnI'a t~$ Znb fٗҗg)X.rA zK:u)cxK3JJֻR SXtVDK0Yl;<j ӮPOj55mtϟ?/qL,|V_Hgw2{ ~;;{iK'}~?<ͯS^@x|9(Z!w]zIuwgIo=Ԁ\5c}m6"u *mc#< FWqݷ`P>*Rfle |R{\q5\qW#WעyZo[W@dO"`[C5àB9FMkcHdQ2&W!ig1ؒ .ja9r =7yV| \+~]Mv7QڣmoO { `6;U^z&~z`C2WOyxe`QvH␜# N܋^Ĥ21')VfMYTp7D_oB1bU Ԏ!Bh+'T(X5B6/c 3T붜#3nUm؉Z%,Zۺڹ[,$Nm\v#vG;ㄎ 9K^]5Y q&o3ۋxM&7p? IРL.dhD?[A"֐vVTe0QQ)^eSu{N {%_+!X,殐hcF\T65SLUj3B3$Z_攼y7rCVhL݇H́ݨ~}#o5 z N睠' 99ex8lɽ3Ig7 dcmnȘ`JCVEeM0_ 0qtüꢙQa2q6[^]kgUW}S(BVYe0 vAb'zL0ʽ4TgSl14ط[N= qS1Aw/ZLkِ+bwWWPNƚRP`!:ҨKXpJx5iGeŎ9FĜs`)%{U!Ԝ(Xr*uʚʁ˅IgSE92;[uP! `*9D` WZ  #|Ү3출#K5cpbO'**)c|X蔏BԬkmxk,*T&Ҫ/o(/sKD*e5mU8 M%- ZqC͛赜Êˍb<.hcYL/吽{|;D&5V)W׋hr*Q6Ȑ gkj`L @|7SD}9Q;εetJ)B(ATJ4U+*Fe출#w*la78ζMUmfE4dÛ˔y_y{? lv}?|D7J 58[+fX Lf-Uom=@QGj6`/0%Mt>;du^SUb;jCl̲BzuZae+&caE%"Bl)$(g%  \Ђ[͉S$ C0Egd/N p%/T}aˠ~]TH-"Mq{#7뉢mg!"xG̐GnUxɁx{%@f:eAEoLOrHl PuA(Τrx"Qf[[Ik4Lnz'ZE>/XuV5nRr].jd?f ұ-aȺ9AҪژ4N?TDQ2%Lv5Xa78ĴW0af ;|]祏<!Lu44%~|?Pvj5Qzt{.Y,s rBw\NiTiQ>vMJECQ F7$zy}7ʛ#e+rrl'rgOLԐ0鶜#xbC,ui-ĽF?#zDm̳˴t;9)O ᪨5 > 9״Z*ٻj4g?OzQCԜ|UE"o]6+Vމ[N5pW=FQjԎ sI;t5t`IYW Ĩ"ag=붜#lWƤf\>ABQb%cS1*,AK6&e9WgvTJ0CX~+@ZB'΀jV7fS4Iw֫s"xJb FSz+p[EKDs uƎXݗѰzI,Y]Y_~FǸi- ZEba/;PT9͐rL.Ucߎ-e<ѤTvStV5RsUZI&[tUL,x*b..^F,ˤcFU|v_qˢ9g2dX6ǥ;sK hskBč{'n dC)|.2/6a|ocL6ͬGs2*rJCլ r:Z&m H'\%t&882_zݕ8?LIYqbM{6[=.GqZmQwAvA'п{6DO|H[_fei5O-_tv;<߽KHOM^9s?<ͯ 䩜fwpQ4f{v.oGBY zxPwl'm R2}H%zyw߅t/&gFCai0G|.v7[w"o"hr[[;Z!ʁ0 nߗ'xR'aȄ(PP,Iq8M!OHiA3-ƞld==yj'&*x ଏI6GETTBB^0HF5=du݅Qk= q?0xT#I4"$&%T$!'e -Aĸ\zPeOQc' zU@](Y ;)E][vC;m@v/u37r387ȳ%Cu|wP\/L 7As\EvZr2@5.nPhWPAݚHa"I(fQ6SKN Bt9ټ\^< }HGͪ #7sOΛ]IaXIt144?zug?$Φ_7AR"l|xn8}WM~(GĘޝ GKW@|o'+K]":wW.)9bT#*I{yj2[ +}_>k_~1D`~jo\zO.aeKpӪ e#Y~cC\lr>]Fslr+F,qrčRuĒbލ8JۺrMAr_סJgn[ ܻ8~r_N.!>^De&>m9&BD RC48q d\dY3:|b`3@;-AQl ~mn_lZ9z];KZ#^ ?KQ3,듕?AJ]f}9ոM;Uڞ"s!(\DVyHd4 +O<uN.ƢenE5nJpԦO僣h  3BMDC^E!z iyPv;v5YݯAzG|P9L&j|Segfx3,UlJվb|Y\*7y'򃛿>O8C8.{ Wyo|WO8~~Z_h4{AiJ7Xݳ "!o)Pv#WY{P%>iϞ7IxJw YQ zp'{(iz5wsZx'y9-^wy 7;q K/*E;4w^OBwiuY7?XԞYQ$d{~2eУ.7IfȗgE_/='}oEI}.0PTJm(-I)Qs }a`z_G° ]%Z裬·[to&v&RӯM[$N^<{ ZݺZݮ 6;m*gMҁ7Z+iH#J "LH<!Lm/Q>piԚ:?!@p *Vx-fXrӃX̆]d$קo*Ix(|@ rGwBg61G\$H`4śՊۀ:>C͒s)HPWp/MVǦm*i-y B4NPD2V`T#ECBeùcP0Q}@[eS ΖmPC٢ ڧe*(XdrƻHzϹ>=*Ѵ ! PADʀD\Xe,rh 5c2$(LR=!uKH[i$}v NJk(wE' FRf}r+66kY@rG\ɫQE8ٝIk8ϳ sr><g'ϥzaۭt1wIFbcwW>Ʊ:av^j?! 7;g7>YR3Nu?G!?{놦i^mk|3T~*Qړ!Vm]*,{Q:P$*¢R`$rۼ re*2u&q9U==^>5?Bf7_$ZqF"LzR&~1>q2B8_7U5$ɞyQQ^vFshJNps8󣋣lkE )XO]@; Vy" 8A*Ƌ ):Y.ˉ9nnEmex1g\ߢUm<]U묁D[>ij̀LۜLބ),""foQe吤22Ύ8\GWIl͏O\/+|KdF97(~yEY\XG6^'p'5Â\y2!$Bd(YYV@-~PDFTM."ۧZ妬jP4D".g)rJȖ@lGs0>,Z= b=y"+jł]0^MhPl7+oe#D#^0kA@ J_y1,9}h%%eHp/}ܖꕠpTo3V^8ɟKFLxZgǛOk>MKe+^i|Nk`F|š;Oa̅UhY`,}4;MR*At3.FQ~}4a/EM~K#6)šT;˲y17N[\Ҳ=0'ʥ;d_ų|@E*E 21|k0ZeD]r+m"%ܗYUZS뗹Bke-w+Ip,F-iX)8R9I@&Ť9_,V^ܦ`ZIlpbQg8xi# Q2VJǽES7>s-|>%}>sÁ4Xj6(u@&8g5 hKX룱AEœh2Z A!DK^-Ai"KRx΢*n#UI3e uLi’`B`IA (gyt8[[xހIP@4I+R9Q*Fy" $%48j8E L:pד@$èZ)E(X"kQ\qfr~ C0+6^L Qo'vҵ|CrlExޝcyUY$-PB}AٔaI P,dWg~euYC'{4VETN8+Ss'JhxYFJЫHH*eg-ʞh5qtή'E1G4' 4X6l]) k(yu%Ht=<fBwu y,'8{dFBD)^w63M,%%^$a(%.봈,sqQq!=W ă\f!e:wYuVS;!C4T>ԚN&+ռ\Yig,m%x9["A9@Ԍ'n܄bַG! 
>Nk+ O"RKS̥q9 l-%/Farө8jzrT]V8|fxz{oY5>\R7O**۔vdp)B)]Z<=$tdRJ (@9Ȓ L.9m!X2Y@Jε @62Vg?2*rjiƱX*cX\0-yVf1}> zx1}4P,)Q`ThX҂ϙ7 zW Q\t2+ZL(()MFE 6oǜByT$uug?b\̮v5 j{g >'"G='Q{98/92X$q7bq.ytus"66!L̐+DKXȒ(\2D !k2XrцُS?`XD#NM[!kGxt@|Ly+S#]p-'-rY/qƌUV1wC)8!&G"[JI U,TTFjGs[ ¸:UBE{\qqg9?,c"d'-$]W)KLqx,0^d1+x \LyZq,xxd lUbK֛ ߷{AN;ˡjwqQvoX \_Wvk_Û/s]{vhY/kr8.~ӕ^mn"Wws\kI a-GEƕm]M}ۭߛG[\Ӧz2YΙYz@㶋 }فPƩaebi+Zd'CymSeVSgT{RDBvyԗ6*^.]=Y^[zut߇ #]mϴiIayAUI`g䋩r^uT9'i]r^TrV99N\iޟXC5B+?EJK U鴅t YN^y D̠54.'C~MdzIWcw.޹4Xçݵfrk#wd<1n?( y.&31rG͜͟84UR/qpˮc2_D%8L;vZf*N,3eP")7?d۟~o%xBy=/IrX* k) _7lfaJW?|[~w ln_n~RPcc4s.5)Ov5nӻ˟?//%g+f%(/oפ)o}Om[jJ^ n_W\{ ~OfokWs,F?0?~ϧOngX[[.)h=ZȞ_cƶs2\{k)s^Mi߶V Sw$+m!# bQ+Ic^hkJ?|;Uz.}uO]cuG@zJmWiضƬbyǵa LoF-b0 ^3 /f^f'|ŘOo^y;q2.Z+9]AuKhOp 6 ,+D¥}G;7_noiF ,jثQقۈR oKf7)uo*Ae #_G= WM+9ޟƠH}+=:H^#BKA"Trsrb (Ճ^+Y3Y{։trG? ;U/8T9ϓ,䎝,Y_#Cn>5ܠ(g5&YS<:ϔּ.^:֋Lu~|NX֎'Hk= xZjީOw(w5Q* ^#!{N7rQ$~yN#\cHN[cvVT=Ԗ:`t3 .ȔKYft0etZ&2N"$#oMF $hxy@U~XM8VhڙHuʱI.RC[ ͤVxa6ŎV?t!!Ƥ%q2{D@ob m5 9W&(DCR]C:&.t,E0yb @?(lD΄l ƸVBp2gZ{9_!5QݽI #6$waT(J^A{QZRR-i+QgjT¨>$N`|~s;iG덺>"U'?,.~?M>C!P<,f狯`/FI2I`T]8Awl;5 ɽ>_4}c4SG##z #ӴH0|AT<ʗ;%_dޓ@~/GO"Ms:Tp$њ;I4\} WlV956mR]y|KHꃈy=>^}J $CʃsqpdQ)P|WKvsyo"fQ@M,<c# `޾\.]<,Ee/73Mas:^nצȾ}{_NURW<]PH4c\(f"g% c#L /ДGQ t9Hhe;Dߖ KRz4)eYaˆuA)/yL67I6oL&⛰|7,dbbI#Cb$ͷs&^>rϨ!~E\^#\|yy"&P 2X3 MO-F.QVH\!B@|^l|{g'v07wy_1x3يbo;1yƢG\SCf-ֱ__}>F.i;m󫀙؆ =1q'qKeJ| NJk%D TŅTy* *8fkGDBMImshw_ IJ?Q7ի+/b"@d+tJiqJ6^[vJpo-ˬcju-./Q_@MU韐Ie]_(vV 4\klx72n3Śf'Q1*9~ؿf_98׌/:(:bIheK)ʅî|WFS'Q(2`8>h1()g焁ȗ3 $1JmQ . []YeA X(V:gePO)"fP? 4\cb;dԔh X>ǤHsi3RTE!!eΒ~|iz,X"B"भǮ/*D˜&iP[꽑@JE%4I)-)aYZa R$ᢅZoK H.rɵ W|`aߞ Ӂ/Ԡd5O9_^\&Rw~y?]$TbVvhK%k8e*Vİ+V؆T"T"Q"Y] Cd  :HY8sSV`lO\ =c@$!i&(&FPƩfy@]Rͳ:Qļq>HF,TP!]~;=ϵ[Lsw؛fQ bFBxU!xx2j)4H}k[ݢw5Q:h֔nˠK]0wEf7pLS w6炵8vTUqWmN5LˌSc"c0SYNFh8vV/ Kƍ(}C\OB`2MyR_M>}ȳ%W' o8Ln~v[?{{dvi]F]}d$5H˫wg(xA ݱ̺͸5\\8\=eۤ=ÏP怒ο!J`dCr !JuN}(}CjOm4=;Z:qlxw=hû^^}]|>"(e > ]p+(`o3$GȹUnO{숮MO?8=U57?,ޘ?߼+,woro&worH0S@WӪ huAZkjC2D-2Lx?asma,xvv T{ۇ]o0k41_o|Oo?J>88R,6_׫%:,l Uѐ'璨SAgJa5`xsEZFm#ޚ=WU֪Pz,yTcü+ʓZVMd~r|~}盛 eio$]CՇxk2/C}~^}] 7|^'~~)[ icwy1;y< y 0lTc X:%c X:%c X:PyEu uNZj .R.+K0F+l?O'sJZ :HtZ$xZΐ '/MVn{=8m](8#3:ʆ1@X$Wsa0&!9k۞ĝZC ɠ8*W*]Bx)`t*o#jԞr Rlһq許w-;|zzWru_'i_@fYt(cy˿ YS+-CB*'쨹4wf͈"6n"%YV;`t& ̆tŵmO+ 껃D+dKBaZvUc=kFǍmCPyPƱ'CkX*-"F0 cAm3L(Q[SlT4@K"KC;YeR60iYc-ȠPI*^ >A RF j'q3a<_~gv6FD NHT,F!\a#Ⱦ^$p}h.k8#jm"7 Ŷrz o==k~w6 GqӒjU4$"2@0-FU{D!Xh"U]F"wuP|,?7)H_O.'!)|ꐒ)JLCJYd]Fhk0ųhG3 W;Lc0_c:HðAq ޥw閻\/^oHUAXBqVOF]Mb8Fg\`g]؆u])ʒZr;PPLf/Gc^3|ѣQ}fi 4<v^@8$2`VOL|7)bmYp[TVK~ K*EMBchl912HC3Ph7z߾i3Ox ֹI?Nii;b콌׵mP^w;:uӣ*>|]E3$J֐;4v@(3`A=D!cv"#>(%&D)Y1 }Vlm,^e@Oɨ)c@*e(*O9bL XtA /4B\- ?i UdcHl9CVhHhu;O,05W\k3((Y Np>/}PXDsfEg~E~@ژBEE@]mFDZ=jp+= ƕD\ lrh/V$|.'L6ĿH9qNbQ@ =$ݑwxçpd-s8X_w >FBS)gN?NyWGizh3G'' P T\WT:B:/@ *EN3l)်0 T)ZUΕ<9J1$/e;[Jy uڜ)g4E:s THBuY{[@TyEDƨrG=0dJu[[|=1 zE  ")-!R,B)*Щh[7=5Mq_R&!fXITF9ǑZ0uuD<$O3,egtq5frWaMT<]^\2%մ޺}w?``,U卫ջTJlU~Y &2 Fkdd6+LqS̎ަs@slKyEL)DΥ@ɱ6P$W&+EFSa#26[ViGh%Bh WTcE;8iy\m:N>MI3[lg8QBA$ ҹUi#!RX}*Y22+;)P=JRl*uY- >k%f^uuZ;F9L:bhG{gFg=0Bx 9/kmH|L4T$-k~Jd-g{9 kʟl{x?b<(s??PG;Ա (_(qt%%5Ud@&ԫ%*iWkJ&@[izv]?LW߶T,42ZYs#uZ3MT}=7'Oܜ*mÄQJR5'LH<1vYDRPp1(NBaFj%}rER2g&$EJ8Fm*n2@A@ʭGoJomzJW[h|yeYVz4<7pQHʹDbjߴ4PL2bY؊Kr70mN밨NOw4TDfEwZI%HD0 AP$@pJITy,* PM5NXW78 ̖ 9"f⤍NjI A+k\OzM՜.OU/U.t? 
w 0uFx<+o5+]D!XF^,3#]!3d6撤z\/ݧ|ihmɒOƅ> s}4{T7+zzVtnGI\y6+b!i}ָ()ĸm&zV4"}zs|lA?3nE?F͛ɡA7N/AMAXg bzd=KO Kқޥbzf6 7vxߛa酡Ϋ@0iDcMHta3-+V=0LEoƃ'b(Z2;x>%^?Ubh?-9襃|D%'Uהσ}Yʴ_Q%°+ꍙ3q 0#JQϝ)r< .-7?a5,q* f}<ݯ{6{uȪcŌm{c VI `6_9x܃!Ĝ\bP0 ftN6?O`b&ts[A59zɵB8 bI +tzVEeƀY ׄE#X`DHr:gs+*O|!r~E6]˄?I>Zi/G07kjOlג<9 0û\pRla"f.foew؏ņVC?{f]luGu*51×yNSݶ+S R4q*d47Bې%( lnx?yE^~;yۖ\_Gףh |Io!w,#HGwf0_t nyf [aoYPKP\=ѕJU;RB( 2?&mkR+R3E9AZaLv8QU폫CYڋv]@c82еvHe\cW. 8%gKM"%+g -(w~% !5E)A6a/H =2(>RŬDw% .QpJ%N&=|޴E ?V잮]˷ r'i|buZz@0:Sl{lOM=vlBZֹv>yxz^jOw-|yEgJ̧SU|t;/dS8k67 y땆6ovWH)12O5^s%@b$[_qsgw{g4T*(sB@|1h6 + #9kF,B0#BXYLLu(#"b1h#2&"ݲ9Sk\>w@sO] 6*Gfh;_V*,ϊl850aǔud;h< T{%l(iv!Yp*!!F[`y^eAc,6ʌ2:ڀmT 5 T@]۵ Z#goY_'=n;]|j嫳wYmҔnWt YLS^QV j2&rū rDO=ck.cWU m60Zrk@[ o GAH.9V1zbav_ۋۋ}Bp-? >5An>,)Yƹ#y;X-rqVZNRHJvncM 1-a5Ud^\ "Ӂ+zo76}5T}>!iG-\KGB+ƔCGA309ZMQT3/\/Wl)N1b`2-?XR3KSj1^&E^4!VĒE*L tƒ)RRA8)hȝb΂ڣa8Ҳbhad4f 9Ӗ :,=tBw@)Jq{ v jaxF W<"0ZPB8(Na|(" <!c )@#HiG+ߩ<_1X,hH ?˜Ȼ@B:0,4O+'uㆉ/w*8ad]zE GLU* ilP9d(QbgwRߚԟ`]BXW=Gԓ@Qۖ'>v8-6x.lNt&&Pi4QV9I%(rᅔHpSOWIy> ae7(Qa1F 6 {3(a195>E۟73cx yW$nVX,jUWR'YYÿ/Hd?Lo1Vͪ~aiK0 Be#Y]>1oSʵMǷK5U7 qMUBp"(bФ qM)me( C2TLޗ^'a#ޖ_LsM`^7l4i6B4Q/ Ẇ@Ohf^0gGbM1ex_|v8!$f~EtvrAqzR?|TOK"UŢKʰcןmYs.SKBf)*BATHZq2ڑh %=sexֲj!"+6.I1R,dy?'T@lX# H irB,[0"*z;\DSk鈵*akg$R+$^_sTng-*6(oq2}2DS5Pl8H0azmvs(:[CL Hr\#rp} FNrB39OS~\M$1I?qQ\7~:Ma -VZTMvوxd4LQ)Qa I x& |LBuʬ\[AQ3"UW])D¢ = v[X~WC VۇL |~9+9RxgEq }74,}Nć~P&KT r04ÑI|g9GZmr6#fAU#T+lm% Ä1mijM\p;4fNx2_'"pw.-8<߽fkTj,ZbsMK%"qƁ}gJs RtVN,T}y-W#(.y0TG/;6$O';l DG/Ucg[l4̴|hw0S+ɗqմK'Ï?BQ?+wmZʄ2a^!X3*'6xΨjgo7kW'Aw`T!0v΄yF!j݄Y˫Ϸ\LjҀ #^~Ͷϫg#s)fXq#N"4dHw彍koh}cޱfK^b h-KI$w$uؖdYmiLbYdWX6t\ n*w F'( Dsͼ EP KC$EG>NOPE#DFۮ9k_MZEYGtb~'+Y)飵4*mfSn_bK*p>NBx/L\*Ө8R!'n:=Qn|05J'PH(hfeָ;w{r =;kS@q)H44wdi1Z$ P6qkTǴN*b$kPԚZ@.y{99_г% ) .*-TXfPA%J.X rѢja$:G/h4wS "W<_:ʫZ-Fo^Ⱦ\\M۶J-" mH!/2Y8PbD1#2up!pB:}/f/1ӚȄTd|LPg]BG`!CURRUr$F`uE`_JdfMannVG‹M¯3RI*cz*X+C"XN$ġcH+A`jCs,)-)+qT0zy-QAKJ]ɯ}~¼ J?Ju`J%J]zU fx*+UVC+V U&ȣ+$WLWʕ-W\f* \er9BjSL%\Be1iWH0h*FBjS;WJի+#"/; ]nRƃ'9)e{(N6}bVXRƗU2ENg/>֕ HsXJF`N]1toGpz8rRȴhVޝJUYxO0̟95fyꌟrYOR(gZзSC[!NNTH ˉ /`*>+`>v_9clZNHKEq+&Dx"sB#2 vE+ʸ@ӮvԮeY:7'by3aYX Fρh$SSF=r۾]&b`LkS0-NPD2V`%rJP\f x;-T{*u \jk+4^XwJ@(|ZI‡{VVh,hJ5y""*+suұ0)5TIL H́U y յ\E X"qP vi'66DT&IQibM7|!Xj*!?m)eַdeRܔ ,F hbOۮ?DxSLaib3ˀ 3|9x6]fxMpY0G}R()0r|ήgMr iJ8F/9 ܇Xwk 65Kʫ&y£aՌ=UXXuPWgH0DOK<(tzVR, 95g(SgUW-P娽$_Wľ0.~y5/{fqA>^F'6 iQR^-')阜^z8u)\ Sز96(N v`=u5XȀ\*V.~VzqME#WU+sU[E5ZǷVYų4oڢ2#Q*mhYN}M"ZLه*ƕ#" 3|F5[]A~oVWfٓ+.~lqeoRdfzŏP|@&#zbHV^/)vS'-@+Wިh#o4P?K^(6B;"#*z& :rSV!YBs@xlhĪ7@(7Se!QuSIΨd :gs+)̮ǏdߍIj7 hxOX1.篣Kn]ݕ-uW< 4y'ځC|0]tDFaR El2 lԒĀXZ-EfNeɮncz#l~]/qE[uv?߲7|y4V`}mq#/~>ˀz1flOM܃Z1 B2*ZFy}rC\S)qW KO׃X"#B0,f8.Wͳ+{{Wߴ4>7/u>[zWÞσvK-CbBS~}eQ|}VکT/R)*5 fѲ65WBDJι/.29u)&CZHWj:-Ӳղ]tR`rdJfiZ/iiEm# Ǔ#ɵXrdZ>G6S)H# sdfE2 x O(PjK])g񎔔͠?Q-*=|Dy@ oZGOQ$VL $hT,Hg@xLlpJ&fZ>罥Behur˳ɗHa/crRU&\lAL՜m{nvݧg՜6>Dn%m: PcSY/C&3 %.qtuytY>=>IcOG8lui=;3/?yc007?=RvyϻРlʛcCoɄuZPFcqr\/JTD#ڷ]y]x[77fbֆn*qQRu uGulv?T= F9H% JƔUHo1Iv4IKu;gmf'O woͮ{B7zN{@'6;Yd@'(MGdȎ\$m4ʧ$ڐT@( ުKQ\8q6y>LGfL RZȹ_Y_g=mJ];Z,mz6%7eB%S-*>ڎܥmgGR;R)(|?zLϠ}j.U^\;&2PpZ*i^R4^` Z''槰M+P1 者Sl>n=W9ē~_#m}a << 6p/:p $I#A;D""6*SrGVD:DxDȼܰq 00`AH`FRsǘҞAXƳ*0P-W!F1X ,Qk$ 'GCT{*D=9A]`-Ck C}9$5az8*Gjm(4rSOg φɤ?噁GxGVBCD$TCByP҂Mfb=C ybȝ$Zi#PxTK@_J%$ĢA0i쐧 ;g/G/(:D#-A1QnR2IE"TRq$o8 Ne'I: jAfw?P2tQRڶ~#ْgnݝ4M_9?op}J^N7k'Go1LJS^d䏪R*prhrS(*f:ǹɠެTOUbQPa `p魊H%$8ic! A hD(IZ /ð? 
^9 ֍ǽLywmHt '|'@waw7}`P$'dAٱdEvҘ-wZnVyUO-3n J7 b> Vj@;HJ"l1#AƢU(d+yGoj>7Ng#]=LvbNSFp$\H×RP% Cm P| 3)l[DF$*2 Ȕ((aeE0Ϊ,RVzh+sG_itUQ,]|z-)Zrli&o]ޭ,sy+Kho_6iT 9d!cz`ٶ^ zIR)5&$˨%IY #sbY2lJ3svd_BS)XRaj FLfi-5O)$D9"p)f )<Sl򇯍P-C&̉_`SI!& &)=Ej/ fڢUSf8^ꗕ{"< >֫RRIN>Z *< D1xRŢÄ"$IoV둏M(ܢ"?Mw"6e1hX|@M-yS^ܹ{?{lHM2vAB셝B:C~˝;[Z <7P[MW0ڍGڹ*HHhhpT6'A)O_]2po3z>\MVj-Iz Ba!bvF0Ҁ SrŞ+@H-=3Aꤑ.۷LGfF@6dhp(u0e#ùwMB~qP^_в 4z}5v]9W|7<|lPyf*|)xӻ0Ȑeco_q^M sx}_?VŽ=Jc?srdYf̤ؗͦ欨?f|={SatW.~x]8&|]Mf]' {{p'㫕T0zqyr ~[5Ւo]:o9gL4XNXcE첱{!Cc@)׻O^{}Wj)]jc$sI*#J]AH?{۽?>R7''PJM?;:JSP QL)ZXePȂ gVZQ"ݠoyh;wuf v2,=R0h|ӧ-fMݜwFcJ2{݀VU`Qvgg.]QVEF}S!9X9)o/oB?3BmbB.*tl2O@YZ0  x]2z* %Z}_zCU(Ϣ|Ѹ:?Y"dIprr|g{{tߞ?o\ptRsP36r[+kq>TylQwHPO Cs7 _M/'׳V$]]r9xt]wΗ E uO#"}z!z4BibwE}r)K] e"3G!_4GD0D֑tM2,bdd$\Qik;[DphЁUӥBf3sviP=}J./],W׾M1AX0qCY .·5aM3 fWUFrF% ".1@U#xe2o$9#s9#7 rF{ J@CN0կ&<|CUZe=lDuR*9 z@Zve=KfԫT[Jk!'E synw[^5“s;K|dJEվE"޺l,A%MCQZ+B&.uyQyBTEA7fg3'7?42Pyw_xgq i#CEbmm1 KD8 %8B'x[|8@KxE!N?qdb"Og`%IjU0-ЂEKOPAt9xP֌U9A(:A"0LA"Z/ l.J8zՒ٘ث%JQKuE!xiZrF#Em%Ba$G|]hҙѹ}eo$p AwE"O@(3$CA"АpCI L I r"S:&թ)$JŤ)bm⌱lhQB52F+ХD]3sDP ў=6~_ȼRsY˹cMzyrڔGBAwOy$#E|:WOF|FPG&cmA%Dgr];:X5i84=н+5=*5 r D8ox!eVD.{@Q&Q4)#n@MP GF 4w~w5/:sԃ.-}}mԩ&3(-:A"hǍ흪둪뇪p:'Ϡ*"`bbR|NAxDPaqE-*h9a ! `E%lt1lfΎ=n>uʰW}ŏwO?RNc 3HDt#o@N+6JFUgT0G+x t%Ma&="{@Dͯ2J2$^iC`m:!Λhe'b9N{lǿ:/'M[očsRLˡ[ЗSQyڦyF LZb4n(R ArV;L9EBEKšR@L9bSfxQJkpjsdlU48!5," RvWeMiwşh40:{x2ɨjTՆ\vl0VIR}bf=kSUP4A×=JV5lj3 /D W>~ʜ#v5ٗlt%j!jv+gC T ey'4.0qc0k[Qb` 4̢&ϱ&HN 1p0baT̹?Hj ǾGD"27бACf#*13535q[K+BU#`;ŝDQw۵dvN$[űL ?sH0k N[J9nR(]J|c/i P(3i:LxL`AmvCSxѹ:gI[V#/2./2bϋOn 9MKr-[.B3)#LRF+VF\X^-ؕE|g/Վ[i[݅^' {K,bOE Gl9[T{W^VXɳ/Ild*DP:mEVs߯UY,p KCVڠ^XR^ G0LzÐm~Xamk 9P-p}O^%Df"H0gxZm:?5(- NX, G/p ’P:$s]T mssK1cFM1!t%c 5?eJ|NFOAa+xW+iB4cݰ~+LKFebIV;cX"=K"-ylnQpɉx> Jsےafk9c`UqE=Zҫ %iw7[XPE.ޖB7lsm_v'w\}|t7B2-CxOCHڠY'6\L M%+F2 m-VO{mTԽt'W_jє8T 8d=8Tơb{ZrZ >uhgTXGI)||v̓Go9$GZ9Z{]\YZ_ݨK~mv&]F+e&b&""/%$7tv(-g5^gRu|><"%3>jRn#>lWyG3HJ۱QـeOӻ6=N>]`d0tpt(6Ջpa" B`We6RtutDWXp2BZuJ(n:Q#` jޖ :Ҷ?]!J-{:AJ3m+5  ʮ=]"])e )Ԯ4!:030%J\vi=`3cΠn@W^X]JK_LMD%UX-۾@_nJV, T3M\X(4 h-3]iJ i(* ` J ]!cM14CiuOW'HW fy@t$uhG3Btut)mH ۣ|.9BW1usz:rU )ҕBk]`s-ěU8+DiEOW'HW[ 4e<BuS+#!: WjG 2"]zPtt2|9tej6)ǒǮǦfpW7CkqJI4阺2 t{SBhWϓ1T3n|j嶔+ֽ>@p]Fry9ne* w$Fͩlߜ!ӨO愋F ˚,-Yz XKPTZ`B PB V}hC Li@t)!= uBJtutřQw¦gR0h7g =L3{1vVZ/*aR]&^ -K%R+ex&DbDO949ό*ɭ5٪<[YCRvYfS|0̠};/.nÍW5n|3̊;oC%f4Sx˶eq4U J%jq\K,ڬ b[ó@/*KU^Vpcxxslx~NWD.n`o?Qw?eӛet6a$oRky}*"W C..*l}}t_L2y'/Z H?Po(aBᬚڭy$z)n]KPE25|KZzv;兇~k _O.Wn?3fcї^<ۜ8:O1rf/$-qE: bx DF\OF:X?eTs9ʧ&Ot~M0Z5~"㊳\?j["w^[+Gs=W@biRtm0[P\' 6+qMJH*v~rtRx{}BP% &CRؔFIg$ɷcP LHZWQG-q#vMӎy-]M;݀=y H 81@ .@sf6Jr9n\tus"6!L̐+DKXE#Ph8F>`ɩF}e<&q,W]#:(hg~4Zp  ¦EܗSؔ;LiRwXTd~ :T)5km}jhcvp)(3Q*蜟R]=+Q2kQL>F0SJفG}<=]Ȥ1^vBMӧ=tyw 3<|>):RM~d6k6᲍@V57zj iiLp,b!.R:܆gJ$ۨw;j¯f׏n.Ԫ&KplS7*JY>i_u魈A'SΆ7$"xL'YZB 1JU:+̒G*a2`W]b&2}fPw&Ή"?| ;1PZ_w_v{ME8w[_fW==)WAlS:bT2D#K\a L)YՂ#p  sRyG]. 
UMg}v\ܗش=bky}+W xF"tg5Zd{\OuiL&8eHnI!{# )__57s+*ht Jk#Go5q3u-sZɟr/77@71݀%|w؃yW};zy;t1 px(lXIDBgبVCܗ$A%D"]B0|WJbA?A@pڌZJ/O"L@ȲkB:ܪ,MLIY$!x*9 BqJ mZa"ٳWGQs!#}uH 1F2Z#%*08點k6З8a BMF2IDc{)7,-xlKBπ<Ġ=$fXxL 5׎s(E,[䩂<8)z(}/3#NjzuX񧔹t9l+S,#R| zjVyc}0c}H-zlTQ82tL?Dj{vp\MBVK5@p7huF70[ rY"xɸ̾SRgTݰ$o[ayFSBa=aOeqOY%~*q7}pŲz+F;0t7D7)~m}ly)ݱ;)rھ{Os f{V:`NsiHQ*PDHAIp!/J׋Z÷xj~:o??޶ϓٔ5foM>lc&@g9nv@7俎~otn:*?4[7ڽW˅(7OjgU9v#W?ڿ?r[OJy-/kj_N擄]>}j=^oYdvQM70Ycm,苎S7iA߳?I⿮;^IG\.qz]W!u^Htt}0K{wsAwCu?]z…tӋ@ w%@'U8v2N3nrk5jg{?Xib3֝-;ǰsNw6+g߾>𜒖7MWrv⎊$crfg|_i<Łmѩ7O{n˃=-:)Oy/^ K&K㴼5"bZ{ʑ_iS"Y| 0d6vw0b?$ƀG^< s/ݖԺZKǽE9E,Ԋ9$=1T:^^|'/7\|lբ{kZa o>(NBd[bJyD$8c{x4 2Ky'g2;qB>hQ"q-->"$܊ND* Erwgqdz/H"0(IJ U $хKsݦQֆL98> m<.sv}czC%W)3~ a]9ր ` i_3 7{`YcG?jTNkN?~;ˤOMLMƬNL,Z\6ZssˏWS̓..&? $Py?:=rr@F;Q:qz;׆T|X4xٴNF?O&CϷ0C&?~yI'}1Cupnffa=N'r1#vzL~v6̇kW3 ۻNg&|^v#%eAL4|<\!͝I[ $1U*`49 E/p@Yo╱ёǹ+M/){?6Ma4MOL.{tI/:t_>g2H#B pb_~7~8K-S^}'htv 1 {1<.igKNW.79_0_V<7Ό<w4Ay/LZd=i"w;Eg-Y&7e+gmR41E_"V?' gcK5'ٓ/f<_wI -a HŰdݪ!]t6 n5,V)[-A&w{0تc,^^o„mw$zkcEFʂ5a_O<.`!RI:Xo/ɥ8@A`Ky\r?؏SoIJa!şH:R\tܝ*l 4ȿnMlӦ9UP)& vGѥo|Zk%YP[?Xwd@ї/xFqr&%M*+"+K\!-Shb#{J{lޡv˜+ڃd?m~Z3iWO~];kO)痵|϶^M}jV?URiuCWGQˏO38g7sKy`~dy],|znB\LfLAF,8[,%WG3I\M9Ș%MוֹGok͟M?:y-E&xoIGHSlXllKoN;f? xwt1}o{}y6n0o珗gE^)rrÃqzCppr4'3\ϳtbu7>bic ub$Rϴbsy-<fbbJH%(V*_4Uo%ӴJtF;̔Rr4Ӱy X!H7^gWb.h|UZ *L5$UE`/b|ѥ+݌4O) 1!]sJIyrT~_0· HٝHIKRM2)j'"*XzUk\UBAjkݪ`qWd}E֔}; H}TFZRAG)G;@ڗrե<_}6)~OpZdBs=YRI~҂uۡҟ|/}X_Uۍ g-U~7Ϥo@^TuV78Ss2/:[kx׫Y_au [_ \us})pk]n{ *hu|ApW\_ \ukcu~_du7+ vlܕF߬ߩ"v6`uDȚ#2"߼]_>7Rڗ僧##{eِbxv~3u^=/L?U>JC̻00Ыc^\u{1p R *rWJ4W_ \\Kn^t~py }dm}N |NG^1''@nPo9ýl~\ܭK'}^"Zӛb/~B.xru~Z㝉αߕS1zn6Ll`Tz9Mav7Κ;7T7z+7ʞ~)\SC^;-<,{4k(}+.FjݶsXU8|[P*@rc;vޢ[6n:a;~V7ȾY?5Ly`. bfi*FN%i-dr PSBRd8he{=ǿʿ~{k/RO~XUea.'[R(jd -{͞rLLv٪Dp"Ī [?) bPb JƑsեݷ.y.m! i竸]k-ǠHvCZE{t&uCUph&R*;m}SN koA:isPa&cw!"hD5|+ޡk Һ{@jL 0b#EO{8IEn7(]1<:fxyJƜ 9s,N ~q)U͍&UEyJU9ThS31Ps ע}{} :bjÐxXb?1D;I%1D[RHqu$~Z#MJVh4vZxRNAb1T} YS#'S&8R3ƢbfQXuM⤚F ȎoO Rj#4%|./Vl26XvC>#PU N {GZa\K 8A r,].ڻ7 Š˳Q1cMpmV#]ɛE N)h b٨ƺz֭?s%8. Cxs`GRga-g j#  < ٻ&`PT$%]&cq]b ~Hc [-VCF*&.u @68+}.$ $8+Q s TI545~+,QeFc &UϵS57 \,2n̷6! Ր ȂA m5nMEeJ5gxEGܠnBPr ``.}oWXؗ{cY\OP XK90a3XCf2)"\\+RAڛk(l(_1/ W#/! Oݗue *nM[@q2q>e,TɂN X?&>/AK", k d-ˤ0;XA;6(A/9ІHTKt]/shNݲiA׀R"T2O Z %l-ѧE ٔDԹc[  HWPPl5n2;i mB}&ٙUeZlO|XgHYttNC5 eVA5[i̵rS?z$]koH+B߶ia3a1ΗLA %$ы{/%˴c9E[dt;h'(su^ ׀Np +ȓ WDNA|; ]~&h\)\@iQs\=8!d'U[zׁT,<\͏!3JE ѳd4)DndjR7rN{=PpJ>9FY,J@"TSEc 1Xɡv չ _mśZAK9* ZC Ox;XN,gfthY fWX4u#9>dp<4bZ6ِCL*֪.u!BA[v)&#@RR 1% 𰝺 Œ]w r - zi M(|"@z3!z cGT/zZnlZB/[D UUz*Ͳjm ś7UaҼ+c5zc*el\K)u1f/;.6{m@8F p}uJ' tN }c9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@?H Cr``@Ew h}w!Je tN  DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@:^'dnPN B3'f0N @X@ț@9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@G{>$'E1'zΞ@;p tN G@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 t2o_~gթ*g]qQAg/`nr{IW)(kh9ξ4w9I?SjO&4b>fSP8W7A/frjV*RuRWG^k-Xb9NMaKX-Ri]WUޯ\*\/ًWadjĻQs(Oi?~zslJem|4a4Z|_^oyZה&;ԛ׿JEJU᪮Tb:8Th۫MKlQ-QˏM͢URXMC+\.9 ^53^pS>[SETJ\yUAUɐR%SɏE7}* 7UuU)F.?n ;̸ - \/2h}Z@-KC 3 @;!l0tp ]!ZPDeDWR2ADWGHWJJԀ {?m3#JmZ  ]Zy QJCtuteS僡+{`nh5P; #]Y{ cjGI}+Dih)8]!`? 
Ws?Bdk\u@tNpNًs?]!J͉~⏬z.ݒt1;bL3Np:*otRn-᝷Nh:Mo@ӜhU5J7 B^5n7 VԊJ!]`i` :?۾JmA[Un(thmAD<ҕ2~Mޝ{& "\=+D{(n($zVheDWp3BR}+DhX CWw8 ZwBDWGIW־p/]+Bá+Dk] Qzz3xtjHCXDj0m% %]yg ]!`CW]!ZdQ:FtЕxdՋ[v@?pgZoͿC=scvNp?JC;TgI؁UDRZ`T.Z4ۖGnU*XY@[|j.l7٩:]kXmK-K(h=\^%Qg5Tx>p8x:ZdtںVSonx~^߻w Yg͑qn~ ٧>s_ڴ'7+f3hWw!yRt.OUPx/ 9 $S2+СbVNyƍ;+)O8| e_p(+_,,.mz</ lO&{j r \ype78R -6J_]>]OǯE[~4SחT#[`խ~_vI7fӼG0Yi,ygudsQ8XZz̃׾xK~:_./g\J uj3g rh_S/g/9uf%[ޯe9>)t9jD}PY_a|&TWR!]rˋd@)jb*\yuu2Fb1|Flb|#Ve^G{O>y(,OSZ+ xV@cm2)s-+BCJ2h6*M(̩RIAI\rݞRA\Wy@%+<GcVAs&`d#Rp4ފQt-qGбo;zn~RWn ׿}[^\6RiVao}˳d$sP_ &<e8= c 'th~ ΂̢..*>,ZAp[9߹V{Zv'iњܸ>ߦ=تfo mҫҫ^:4) Chc4g@ֆ ]PeժOב R,ȧtCd8=~lӜile\'|C]ՙxZ2'Kg] l.RJJ!mLdᡆ D,=&gW&ARfRJ%poJ0 յM2Pt~&Gۿ^+STLEdi—YK#S:^2 zi3uMR3ʓ(O<O_8Ƙ1L5R(aQXIKds+߱v<^X垴u}su_Mܚ|?gɷ^\6s~L}z{I+tQV<^3%5%roQx5Բ2Ru7"خTAE/|58:fA+kf/Pt9mu1lWH](< k_G>AЫv)LNc$sBpJ՜RO1^uu֩r rhna .fۅ#OY׸u3R (UWR zPEr rxcm#O<5euocQ]g2C6D>TK_3С{_UUxn3!H8Of/ Zb}6eJ3m."D;vy! BVx3[Sz֋GV۝k@'6y|ѝ1MkY^wM/|Us)JMI*yS dUb5&_\^V`"*RvM7jc]dDd"X6sYLʙndcF&D}p!AL^;g/lJx:-b@4: \t# B+\df.s!e:r26ì%F{!C4TLJQ叿ع5?~ ㆩe = ^%"gK$(5#Ce7Y~nIYS HNb9RjQgAZ9"$|n-%0T`l5;yT`}V\d J >}vM.,X;CUuצ-ᅱ(UQhE "D@~C5W89# AG^ QrTN1Υ, A(im>D'嬂DJε @62VadUeBW MOk[\flͶc9bOAFhI& @, M2ks&A*Y!*+grNfEP^H6`TYͤY(ʛ H#Z9q2Nw$fWPut]=ݫقQ#C񨽋<9/HYI2Αp#'{&"kcDG 21CHEDKX("QB)krXhCՆs[Q?ٰ;kcWFD#b{Cގ*VVR#[pZ-',DbEA;Θ1g% CP =!Y@JZ\A)ZMF [ ʹ:<ތL(8?1$ d'eģ`Y(xhIJZYy\OU:2>@ئɎ7~lȣ2v1xCPeExD\Bя;ëG?]NZpmwQ1&XT{%iD}<$ \eLI(c$&)GD'3Cia;J>@Aj'Jv@RRci┌n . FPU.&yNڴpRܠi[ﶄtmcE2A K&"61Jm>o *pNe|IoOLH. y4Z:j^,sҷ垥I  ZfP ۦq\W[ h.@N K5RǸ,Xh;=g=Gl0$Gk5^ـ$c ס"iMJkv1$֛f ?tTQ*%q dq^(}(k, h1QƐqTבhc%:G &cw#->E 0VG/$r&B0fƢEvԪ_6; %4LK=Xv?6pkyf,+R^-?ݡ  SpfrZEۨoaip7mTڊj7/9/}ǶK\$.Mt{ڿ"fmyأ1^ GiCj\v9.5f!j-, ͧh-l6e(%OɧN?ͧ;[6_p6H0,`:V*fJl> ڴuzcRn FRvGuɽ/]);9FJ.$1nN Z>7;0(Ž >hbrPF[ νK74C2'phIHFbŢ]*}|ɞh^lV+/;̜1?$[^S1D|jqQ6xgw xD*1Ld~"\$ѐ>@h,<҆[+]>g8|Q1:Zl?qpACk%V;z_e~~/q_Dtd;'o\'B S(%YD-"*F93r1'oϭ䦃H׀񓶜HENhюSeډEIȘf*!xp&{f~fs*Uoc_}Ֆ] H>e 9}xQt" OjqQϓ`nTu֨>uD(םȂG廈,IDK77I@a& FIm2 rĔK ^Ȃ o#Z*YGĒL o|XNVN5օs R L䨌c:tt,up)h)fW;AX|:,/OE iCʖ7@);轚{ҞR2ݞ.!P& B?,;`WHC߷o0Ko+?^eq2~7!dh ׉gߟW2]S#pA~1_KYZv7 |-xs$So~Xz$BYDBmjzn\bYNfZ~XK G +K߶k`ꝟ$/Xߨe j6/`㻟VWC`rv&ɀqpC t6D<xGzɼ0 ͯɋ2e}f26*xr#!'J^}o!m>zU`Ea/ fk~fL#]l{a2P%Em>AH Bc=R4.p B+ENk% J;Q;$s9l`xȊ'#2rN,H&SNE˴k֮ڥZ7lgᏫe@׫U%½ޡ'pm X3 z'4 pݷAԺͲi:nH6gZ6r4nMg=6>j[odOw^16T{@)e<<zO$4חnzlnMssb6d7Un6!m.=6pA|]D^_!TE d&Kpl4v(RbZYs{kiKxu2:lNk!}!^"64KZY].2%Yań!R F%08%f2{g1@3M~;1PZO >bUIlv ݧڤg6wx2_s+OZ *{28ϥ& sC31gCBLK^P797P[ sD2n T4EC oY߀w ܼjWvMuE,_HYn,'wN#{$z+,4ʻD M&ʐtCW;#WDޭ6 F=~}MsP_7s+5:/mJkc8tGI:BQKpYk3I!{]ҽ0xNj0'{.+Uφ|)?p9xQI-Hd,k#:DȀF ;)nG3ǸfKKG4vP\ hm@vZ`: HO5/mRRﭝ&#ٸ_60hu-ha'_$<6p2՜_# ]OE;O%lv?8 \la8f[8uxl:->t'f**{am*%Wأ]<2C}oV|O*;cZKR@iym,푯?|3Mn֮ Jh+KbG%H3}=LBi˕J{H}9^9rkGrJay4G+fsG?nk.:eJ&ZV3),VZ| _F US[L~mdޝAwlM?fЖ%̑0R? /ּ^ YhI%䕋U q^_92$I {XI{do(O`MPTtsVJK㪋z|Kj6>Rʨ`{]K,1 X#6:0,wx 'etc i['Xp"toGAx$w;ۑo۹}7Ll vO;"'_}{xU7&­8>Nw0|5ev3 ɵ I-,vx7bŅa\ c Y;!ޑzNsOiX'N :޷Fhw]gcϯm9]<Շ ~~em4?{ͻ,j}w;L/gs F~~4{q3m"`oޟFo˦7>lz5|.OG'_nzA2fO/9էYER@e*vi4lJ87lcmh';_X:. 
jXt+Z(.8l<a%3~끉S\𥴭m㆙Ed*qs<}|K9 T^6vҲ L*`@T+Kf= n.}մ5JVysoZw³awq1!{$_犫.e)%fS5WD IR`w]9 ?*8`(1֖rrb.Tr;Nl:h[i9@w|Lb`&2!bƲDhڕ5p7/qȊҊciRqk8VOo}ݔIdFFfVӨГFLӌ"xFlprWVq*uq9&' +PƺB&y\JZ@j09@V2\ L' Pd U"$7یpW Wlq*%Wĕ@0XW(W\pjwJ+ +-a9YW(lpri>"u\J +#Q9@ >^ԝpjLWj(u5g1rWV'B\=\ ^`x,%Xc2KO-;~*EbqWDM/F3="t,X3)3\Q6uÊrKss}~^(ih.z2q(\6gAI?r0M.JD͹{w9]ٲ NnoedYyZzxsxq0\VOVr(s OJ BxQ]O.n-OhM.`T.^H^H[k>w=.Eo;ipn5b`_ |RzGz^MPnʘज़X*O?e)0fķKuk}| urnp%VMeNÿnZ6FiDԍl7KLN+!6gtpZpz^zҒדɧKz:hѫdr'?j#p3hVw_(/ۋqyQ+y)xm?DfWU+U k6BɩkdɃf{7]HW3X(OԔbPTmۼ/،Z(XlZ(W\ZVԽZ2ū$^-,#\ࣗ2%WP PAS) +59=3eXWVq*/ ӌpd W(ec]ZbUꂫ!J 3 6&\\r4y\JF +%&"#\`-ʵ,Ap+Tuq%^e+l^׺\M;ǻJ% +蝔D&'(f+P(OWJ0UFBg+kWVq*)z66>#R]%3f4>2H6PVc`+'صq0OM,{i]0mS%h{ f\g++e.B&y\JxqŔOtbJF=#~jIWR\ WOyFBg+kU+TTq%4g l'X+]SYp5@\IɺBf++ Wq*e+Q2#\`KP.ƺB" w5D\iß"pW(W\pjIWQVp5@\# K*=v&~jyBB\ Wh-sr`Oʵ]ZC,g+a+jcn2>uUz =v=~j167=U 2{D*e.LKz%̆UK-JeljYj)-O>\/cŽ~r6\(ؚl W2UrS\b3W4W X1 P. :PWĕ q@0gXW(W\pjHW2tDWO+I8I ,২'/\?{F] aH .I &2xs$z%;N0=2zM=0`IdTխs-ff1thѫ+F3+:MǏt+k=zt(lyt!jAt=qpZ ]1Z BWϐ"K+vz9/+Feu* ,8] ]1|3h9vbQ;t8饢קwO{ܯpG5_yRCl#Cz]%' tީ@ukkJ+r'ZhH)cи)ÛM bi6.7KaiF4LIX6 )+kRpt(cztePP* bi9tetTdBW֚c+F3+kTt~Ath1tpbc+Fs%;ܰuh2ճo>Npm"o#JoV>=;wMm77^ׯ_vsߞs^i?vGqdPۤ3u f}zlS6DQsmw|gr80xGv8h77|V~N&Ƹ?/`>)}8ŗ&rO}f̔qYw#~6>~C_ȪO܊WoGe7GGy"y~BL Ӣ|B|r63= qz ?]MS]A߽ԷwZ6Wov#lvqtѴB$*T>f򕲱x+W:74CѪO^?^!M2o7?hjs>hpmzdJ%%(mP%egt  (+g,Rt^']?.XݨJ t!jM.Rap`#;j>ק^cGqiF:R_짅;Їj0:Qj,ۻPB!g{hs=֦HJGDzϽ#BuTDJ7)b0YI-Af4ʣdrrŇ$Ztm4)@ HXùڊ6> TM)PR\'R#%IAKAha2c֞ИU7T=F+PtlUF-9+Z/pOD&}IM4Z@@TRT݃6&3 tCt(520T`&!Kh*gr#Q6{W_GRHQ"=@#.dH}{kyU 1WtTOd %SRl QOB6}z8oNB UeX[TG*F[ ރFuŎ'JqN>@{"Bd{r5nj9į3Zk|1 <6t:$7'XK!QkJ[DHp,Ƹ[.ҾВjv,y2jj5Ѫr>Z 9i=dT j,oT̛2I< p\AQ{+TtgqJAQ"Ł.0͑&A(;kZRl eo ڸ]u Rq*NR3Mz֒YAT"Fzm>FA3yǒ_5n!!ѿ!!pPB=' Y,kO-A! c9WTU/<-dPgF\' %_ݠ=bA\ 4WUmAUvND( / 0` zv}k^\緧j*ƷYuWw Fm3bka&a=Cw /M %y:d& jml@hqU +0똆'z$; %tA\xoQV24ITi"Y!#`8ӥi6`^(^b )nMG⭐C@8.̪d!:T?Vy'*S@vTVW QH}_ny'kWمX*ő+O [syY}%BE4=B.MLAJ<.Jz3@]Ii(ʠvКf@rFEk!Y)֎ a<Qy@'^oBZڝkYfՌ.#8Xx3F5^Q0 td+!x݄i63+I@̴lIU ה!P?Amj DQ񨈬wM`Qy8 PY|,l-V blWNԌj-Ѫ w%^!gѝ5MVU5@ jfMá _5{%fLLorC6eHu |4\DC%l0_ ֛.2 6`^y>ද6]NGE>[ᆭ:\zL5Pw]< pdO`3=kl Ϊ4QG]GQ3LHY#h4v˨ ,ӣfidE^AI'? 9k9lzBg 37W;:)jTd*( @k*'RC֡7q1옍}u`n?yE#xSN ~!ϺCE7 F-m. \U -:Q\jGn:T,~ xi,mTѵYcR=5 ޘ[Vj C5kփ*HTJFҝ3遲vT FPXG6%Yoy~581i^>@śk΁ lK}4bЫ8vAi "Y'XkrlMCiN3 _B\5/vߟhJ7a#d*hSLCi"8 8%oC)FjЭwA< \T*M .Fgr\U-1˱XTI~aAH 8iBeN V3f !KkBE9:W-b!(l>J+$XH5nzF.^ܲl[w{o ]`nixFLTWWQNv4LhGYcRO?/z{qw{I`z} r٤]s/o6V&Dc}ta 0 |4qgzݾ:ytxND+>wL۳ۧ{/O79YT:n6v`}zN][//mFŵp|)jAN c\dbQz8Ht} $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@: J%=8,fbeg=K'$U'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q=_'{mdrJ ƥ@@{;Eq=G'G $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@zN еKrAt^pޚ@@  @QA4H@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 |@mg.wk}Xrw7?U\_l K2.-nZq77.b\zƥOޣ2%ˡ+d1;]1[)@Ux|t/-f+ i1tbRDNW@yBWχPXEXp<ϖ<ϖњpt(ϑRT} P~ oz|/l.X_bg^y|q{j!߷WzE4j׽!uRc;ݎcoͲ뇷[7^]>||߽t~~}P~uex5 ;)qs}.zc޼ '6zݮ>kȋW/+mHhYK `z?̺atcg!)Fդģ$VFTY/+#^Ȋxƛ334I*{%(/$9<Ҋ{nGd*ؠeyZ[yogcƃe`FT2w9-B?ۻzr1+){ :OcҜM;*vkv쯸ۺ=p|mjzv3(% !aJoXC=K!\-"s4/"k߆X3}vhz<3Q,2˾Tu_r#f*ρg^LK'7FCh9Bi8L[cëm)5te=IiE?RvN.Ga3moTvUp 0- GfVg2YE""[xmf u2GZV#W-s ͤB8 b.p,c`B*zr2½ 8ggb6c,;}^l+QKJj)` s2Y\"8q;%9cS$F)9udayl7pHSk -+lYk RB7d]Bh]`CTk RB Qr++| -+h{A+T[ tz-BWmS0o ]!\CBWVWHWX֪`sޞ++x[ ѪPk+\cX)ĕ M{ R)BWqtBzte6n]!`B5cWVtBTwt Ak]!`՞v[ "Z`PZ;f0BSvD4tJհ+]юnzi$oTV'-v0u}bw,M㨒2ikL[hVWji(M#J;~4 - m]`xk rB7Tk+ΔEtU{ ֨ǍWWҎ^!] 
flB= =>i+@)ҕT65tp5i ]!ړeoHWskxJqAh{e-thk:]!nUҕ抶j $p \՚+DkOW^!]ni]!`)ZCWW& tBjte!McWEtpUknP;Ltt۱ӯD`AOLW{=*U'ʻnۃXGW7M/e}F1f٭{_,>}jjD?/ LʊʰA&kdSlhjdλd5Y,gkVnw1AM*Ӎ;-8뫈j|~RdZlWn.>|w;wr=?%ߗ8L <=_š^oDZw_~Xt~OiRm= p |,kB_ūbz2eqۇe@ڹ]ZOL{EzTro5+spı;{ :mvoM6_Rlպa O41ݜUq#HkzT}֤eNj QG}M(bΙJy4$h>%ځmӽr0M~[2(y0L{Vn]Etka|=egp#w{Fq-t';e&r=)8x~oxѼV ۪X~3^_ex޺Ū&U5vuvVp^zYp=n/o:c2Y #|޻Cu{x(osQ>I[ (o?Jlg q l ˾Xʲ~-Hk(b6)g}?Bt|=)4Nݤ U3/~uSܺf {feMG^z7;9./?BL;TA!2+9$uIt? y6WA^d/|HLiЅ뜼քtbn{;)+G0U9\ՄmnSޱMJTXS}&b <]:$&$B#H>/TDH)#76;f#5PtD= 6P3.> 'dSV2y<1wƛEWJЭ}&*ua]xn^ U7:/TZev?.8|Ns8=xI40Is~} B 9L6X]j~Zw9K5 h{ȅ+svS/Uͮ NHrΪKaԤW#z~,I/ q$G AX!V3av|Mh||iȷl%j3vMm ]S4Xj6(锔aõ4xhK  tzUQA)DAl s9O'-'%a)^ Ns*_Eԣ(kDm\r?_O(K+ )EL+­)or1(qE|Uo.;UAE= a3e|&YIE;,M6tA4;ϭsAߞw~v :)ŵ1ϭ[!34IX-U02$q ljI#فSQ0}gJ$P!, $mgy~VߛfOӆO*~ _̣t qrAHE (S^:OdJ)d%trԪ32]uͷb ށOlZ)T>$Gߣ`֢rře  C+ lΝԪMjٰ8i?? f?8"J)Gf9)$2tq$o}hSak֎8iv}u$mlMڔ"6aG6m,~{UD%O+]iJ f"fPb.& T\ZX^ŬZ9yfwV߈`$)MqLP5;X8A6Hh 2l:caQHХsc%Ɗ0kwf!z;kbW Cbq}&sSsOVK枔B3N ]ɉMhܢ%vdmYbrl"aƓ i罥B$2`39RYλYuiY G)A2g}ԫZG?3{C"b\668 NK"z]eԛ##Td̆oIWL$qɍHˈ̚PwbO]pv4)A~5*6;9(]ǿ*ow4]ۨA=lrV>zߚY;)x4ͅmߖ(sB(g%IC,;1` uLP })=7(cb꘭uY8"+(e 68O3c}Z9MuYơ\hjBqr mvxR$5E??pv8xE_DTV!M6gp I5/< n{.2½g!mrifc36 rHPuiQMaڬ)m:1.Z*\DPlxei882 t hz'd ހB!E4pML$( #plp>f  AuҮyˠ~|&.[?4&>eD[3#ڎ;F|4rZۓpԛz``&s(B)PB')8EcPwBALDLGREu.8O3Eb\Z.+9xGKBǂ&l`"d!{޶, O3XӪfilO&Nc>dN[kYR }^%YeFbKdXn,#l X TL3:xx4ŽC2?|`X"rK֛bJWӣ ~̡lwcFQF;E?>O#AkzcC´NeAe.B9Ff"muש5"(Ev6+6(Am :dԈkb0!(:)KU@Š8 %:IŜ  ǨM]wQ<kV{[g(7=Zx4>mٖ5N$HK ޴`P9X97- `o8XeĽh Kr7)nMl7yҷ힩' RrȲ̰4N+!}xvQX"TF fp #(| [L&NjI i X\YQςuTiz:$c=a-&L%R m"2&L@%8%L"uvGK^H& NHn52x#O3Blq,v-tb?T iY~p|r'~|Z$7`T$gX8Ɋ0y18%'ϭ$?G?ݙVsz;f2WeY?ЅGKϥL w/_խ Em]O23e)ͮ&+,s$)VU.Wt\h6J~KWzHYV#aXQ.,fWnP(ϲ(y<̲^a{gw%TL BϲV? YY>j1]⪢?̮$jn d}|VOeFR> Zqy BnZ :$^fdXn4%9s)(ԝWvxsZ@]s_%P0 xGtb@&D. ` .fK{o`!sJYX-6@FSrDoo(ٰHǨ 9M1D%+5%`}W4m泋u^_B!גݸ;Ee0R>'mDM$$ۃAߊ*?O"$nxvn<ې@!Q"Kr 'Nl\.߷m}7.G/-E}Sp<M%q0D3jPUuO+%14|(TgRX>Jtyf^6^wR+R3E9AZaLvzУ6"⪗V\iYjYѮS֔.p8}aJײ4gҴ8Z-4t 紘+>-I[nZ m<JTY)̀.*Sd. ˒JB|T8+IR-IN%6Qh=tU'>+pE^,I#ܭiQ>W?{_*~[ _PknlTs65tTT̷7޽NWȔùtQ3iQ| As/Q`:c4m8VGsn9jv'X`T9F$㑅H>|RFDD b FрG!eLDe۽5pݧָ|2~w@Ar&u .6["߯nZU5Y5fIR7rVPQqL06 cƣ@WȍVkg+bU v:Uh4BkM(-( F刊`) NPDе45pz+dms/:q٦LvEgY>e1 jYGK~|-)#2#ނ cAH.9V1 CxnՎGgGG 7s[__o7cIp40%w8(*nqԖc0SRF' CͥPzJwCÁP>Nf͸rPZ WF:{%7}1-T=N,vQ *"W)=q@Lck16)SͼpG8GH\q:9-?Xb`Yd RC:0!-r M^;kcKp3S%S`:j/%%Th6Z~0v`3kzۃDҖ^ɻQtg+y?Rk;D)]oyuMc^ä np F/Lk1@iNv3.|z6c"U F[O GRHŀx ?WyHR%i$uݸaDag/֥G{[}TŨh6 eCVi(%P s[#p|DD_jf?P2tPߥm 6#ْO\;3^9W9t5I4"LI10lT#^H)daI;6l!%[ۧI{fxѓ}Ee(>)|}+(,G`SX`+ U!ʰb6LRoa)NbwJS֮XT;}f>PKa8fE8 xl:-xZk _{ZO%~:[D.9UJ& <|a)QvRnO0ucM U6U!j3h/uzaf_#ì91Ҧ21׈9|k1''9}g'ԍsrL>4X3g&Roz$6𢏄.6Zi WSA4d#E(B3"[FMDEp*Vrfa^ 5wB;fJ9۩N.h˽+ə'_>˽Mvkf`9f'`(Zna۞,Tv lCƫtV?ׯ˟ߧH{a:xI:'T꓏eW@.8};I}Cϓ {h߾mWw";jёGԢ[Fָ|#]wJ|aӍ}{2nmё'[c@?ǓH_q骮hV~}XԽPˡnzAE zA;4E ,^U * 1i9E&udb͞&)wu, 6<z݂g5Zj:R'hu,DXr9.ƃ0= +=z9kRٝ?˘~i(#Pui%y{yș &r&51s4 LyJ𙒦!zpwUR֒^d"_>u,V+wx˅RR,׳%W-%/.MrkVR& 1A}0yTQ͍(sFV;|xpt>r :#f?ޥv&30X ~98x^zIN(y-E9c&r;C*vp .}=n>QSaAqMC AJG،sjʙi7Sϡa9$$V8m0Kmc}J!,E N&xۮ8k_n8Od<wqJ ip|%@ōq,. >bYRf$;rŞH %F`I&YSCܾUoX}))ZrqN_'/8}R7▲}6:^RE;u<(4h ƪ1q*'kɑ> kǝm 6^;I*%fAG b2jmfR1,Eƾ̜{F SBICSuЁY"ZKS.$)is"-)f){]Sl/P>-!tC]/G7|W8%$)Er/ E58eY@òrcZd5a*R* Jq5 #(\T-hC '">}l@>6TsX~][Ƕz-Ar-j$rƣݧb@*4ya}tdzWy MnXeuMnRO^cmAWB*ikwV0 `hM1Rڤ2&N+Rڴu e 1⏏^}boIgKI!QL1dLD3O[{ -Qꤑ.۷,@dh_|`΍,J vo(+ !,9HcJxκر3G)M]μ,=nc']WB-]mws [Xt<͵S5v3pV%{;U0pSv{9f|v6Eu<]Y.)Eo-weٗssV!}0; w^,6>.#DB|&]\z5V\|oǼWbϭ'Ыڝ?on̿|wqˎZh6]7'IlKO ބ|ÄGl%`\w?1.O' 2?7z0A<#{l! 
\Uk:\U+p•1j.WZ3xVڑ]}pjUrD79 Y|7xa|wu1濺}W;2$Es(C-K:)$ wu/T;<$lR}6]s}~`o_r3*T,Y$+C*$e&W" > 2ܪ cO߽`[|~sn]; k;-ثn;i[Vf?խ|s=HpkO9;>eu_d~]M( voPFydGJI`MĮ(b1!akPqu`m.w[m摞Z ΠMʁzaj(IG̐}ixbPH<륣^-"MDtH}~|~OGS2r?L&W?͍cu]?=pth~oYq_'(>`~25fݕmWWC[P#tlL1Ï՝A}mu=2N}.c_J]t-F(+ΉQ!KŰKUD]*]*]]u.ٔ֓29Fs)_6JHx Uh'ɌOZl\IݚIv;BvkGvDHki>.rд#%Hh; E:QgIDmCcQ"ؔx[,CI ZBH6b L): m*IZ Ԕicڄ1PŘX H[XÆ&%z_sbtO9.5s7%BCDV]H>x7yr֧5kىOӗ< 牨hls꓏fuW߀Ɣ` {y'CGB{c0 a^7Al'sd,NHUF*͎D_ADQ&Q4)Y`]A!@Xee,ͺ|o-v8~|(LٚS̫!YO8Bk֙N@.+:c.X;TZXu֣惱N3"fp2Mv@2Mv2M*K|)fT*&aMZˀ*P ,mY# D兏^"Q u)Ĉ*̃KĀ:ndj:s"BJ Өٟ _$b>;$*h7a_ZL6)$Ad'qJ=Gj*S뼉V#%v/3-e|PFWYtz~r|J*^_v;y/r#ܞ8O7f?>Y#*sژP5)諨i=06~Nsog^٧.f?n>Ox<=`c7,)mguؾo_Z/e +gt~[ff]&cug=b*t>ogw_bb|Ґ0R!䡋K&v>1y,/EMjzlXsNId廋3o{]`Q˳9gNϦG9 &?y][MC}Iݜ0k?Lx4NΏϦ+SOǟΟwEDxdTɁꨋ>i7K8tmj(#c\¸7麵y?}\%qAW-1|p~rvԃv'w+>McH^, [C?v,}RuOT1S(eThUEk> \T9M}âB6)G>r=kI(fuU$~gI{H+ JXAdg v:%)WݔH%5%n]]|]zc e & ¼&BP5;LjnDh:vF6x3'y|t܄=/Dx٥v'(XO4-5[B1,VdH  `i];lyD?dlyQ0JMJJdԈkb0!Ĕ4"e ('B|u+ј0!)I1jv (µfAk>hl =m%7=vbm:>mٖzq 'X%؟ xRR,R@'Y@Lt#B1ʈ[+(/pt,e22l=RN@4:&ѩH/hEwZI%PD0@AP(@JG]1# yQBp #(| [L@I-07240T`BseG? >\%jcJ<&yFLBidQ"@]S*MDP„ RtP. Hh)r rւ/8!4I?%pw0O,UT Ns,]Ne8,yI1 Xr$uӍi5WN?OS ]~>?%{(r09vV} RD!F0#_|rҜdKz5ϩKM&1ljc[8KbϊHayϨa3`+(gb܉rS>1LpY>A&ƅ?aůy;r%Gc_0F{@S|jf7$j_s7),O)&o\^"v$ɳ7K3SKn7DpNbNl.lb4 uYkWÀwD-64kB*M¥ ϰFO<#gyl<0zb#>l~N,g U#x}迗XiEB:k Pr_SB?yEf1H^5k!f( HԌ"F SHRL.kyOXZEv7a7\Uh|/}_xȚ=yŶWZܩ7|RH~ !:8G_0@?K<4 l#SeA㩐B̄Ny{l+QnL=>&KaG^ !a[,"a\0`;n5a!(5܊zϷH}ܵNL.i 4FtxY}9mRdԞ&Ji0K-T(" GdSYp/XBAtcu j @ZK%@;E-V^dv76H=& o[ ;gSvNT#[8S,]!i@U ;-Q1䄥@ThP"ƑrW^)ǝ6K] ;/rƀ CzHm ^ ẃe5X cz=ri+lM"6]ͷzƋ ݆" .¨ø$?OJp(VNai}W{hiEHo =gG&624l3뺰4'ҴfJf]\6)A.Ska/wFiUYKN`Ā#C"!#UJy2u G eZ30{-#hj@Ғ$Zg@ޡ*%5X5 BŻoGjdtVYdm3Gg<$OؽM{k~*>CzĿ)OF&R2l%,q$ٛrqA=hv< gE,u%+ěf3А}R(A Y4l6}ǐJWuaUv5Adgoz޴c@^djUxQy;IEN ` 2YO/Nj++Rl]4GmRxAW@m6Ja\$ki w4.&r9F0:]VwO92r-5vj ad #NOkyo|5>${|mn|;kz}p~+YO~uMߪ2 |`%j6 7r0\=N%M5˾D@pU>]oq9ּkYKvpmx Au"RVbeɸ# LT$[_cs-켅O_QnR6|iL@^p+ uǠip$4#-\UMvԀaqph:h< T{%l(iv!Yp*!!F[`SiOeAc,6ʌ2:ڀmT 5@Ti[v ިg}t- pyWg97)ݮ,',nY.LyȄh cAH)%ʖV1 Cxn ՎGgG^4sUGYvn}Vkо݌%1˃T8`$8Q[ttVZNRHJ$a7g ( } v9u=`87v[Гl_buJo\/pbӎZF"W)=q@Lck16)SͼpG8GHԹc cxD XK,%A)5"}`䵳6d =>I2E4^JJ8' %g0li1=Z,9<$ 4yw_/Inlw#Ca/R[gLuܦ1/xPEaRYI.,$QD Ax5B`S@oF4';Oc=[YOy5c" F[O GRHŀx ?WyHR%i$uݸa\`'Ϟ֥G{[}TŨh6 eCVi(%P s[#,R|DDuUBm(I ;)Em[v{;M@/׃nN7ꌦo+ٜ$XCѤI* /FT{;Oskt\gbU< Gg-CEItk.g3*(0 ,fsl>LR{af'bw)/ld`eNzX 1#77ĝ#Ʃf'irwwZ൰4JrǏ~8;ltzT%)3 Y/B@>}?!)~ycԍsrLޥ4X3gJO$6𢏄.6Zi WSA4d#E(B3"[FMDEp1 󋰨1ƭ,};5ɅYz;JrI&Ԩޗsjgf?CGD),rv gŻד7 /~risO^Oo|} m.'%tdd=*NCdGrrdJ/Is僚M72Ms,͈ۢ#O :M8'=#r9>pU]_/ZaJcaf]{K:薫'Mؙ^CS`Ub"`ߏx3ФLgf;Nk G^Fryo'-;FK}Tj-Kn'D"hAPi1 %Miš>;!Et|Bwxp R:(Di S[JDؿٛEf~x }#aJ|Xh$By~\]dY|p ,p/4*IIR r2-\H_pALpoCW'jrxU_P74'KY(`|AQbot4k$ŏ~**ObO|zЩZK"`cUu2ē8}c W`536K\iKJl}~7[$һqTK *ePXPuh"8G |(M >Su^4wmV|Ssdֺ$lĺfV:J7sYg 2jm RZmeJQT/ Sŧ썧O)|kBw1Mfz'Ǥ} Ik*D/Tk'S_RdՌ!T6% ͚Bdwʽj4 lĩjm4kEG-c YHT=zXaHAD?"o@fc%}Ĺ2?}^;*viqbؽx jh}9Ƈw\|{X[tB?A v+L6C{_nzR࣯' AͅWCMy@{B[JjmKBLjR}RS/SNٷelMZ8\o'|<=_2KN[&.%(H.2 x)ajsUE$e͝w~ߕpd}5dG6)ϗzෝ΍,voG|B k \,ֱt~ND ȇŅ;8AHS`yPG'qax=. 
-c;D){*!2B 8DckkJ!Ť"Ti%P0^?zDga>YH$v%̵PY 4wjV:p OWbu`ઙ|(pլ}jxjVp =U3\5s`Jesf^!\9rzr=v,8X+EErpiO*_iџVO' wdHX6L>5iSHPRb\}YG\?5k~9M?ǿ۵X[}M'ߟ!t+wlƷg(G.Q N>M&6 pˈQY:rsJDk vXSLN~:фT#+O東2VԌ2V,2Vk@)kgKI`d]HQc(C)!녗K#\ᬽ| >ꢣX]a:r!1)3dH7sk\S3G/?ov^;|;CgC%9u=tkwv{I/W6t~|κoN:||rJ]Gs` 닳ݦ6 JUn^~_ "<5+6FBTŧkXitٓv/bnk/OÆs-+˰̫gZfͲޗoBBNc9:9k@>]ymv,sBn%|ttr'~WN?}>~[ 7]#"Z?1o'Ge?m*~3Lbaܝ^{NzƊOos af;>];ޅ~WCzu۶]Vw~PzV3^TOFrn9wAQjqژ@_(f#q xkB*kt*aZ57:0ǶY7"%D!`苵cbc`):V)9w/6 Ӧ3vt帊;6<[W/OrQ(&DlUbc$LyqNeW4\dp.e8jvb*&[J-2k-ZA߅V --dՍj˓'8O/suUd!bAʰjK GQI*~2z_/p,G6eL\۱T7WVs=Į@n|iȽBtF]XWab!XIaHmVDĶOej5sqnCd_ηO"qrʨELtUa"s&ΉC[2d2 E ]{s+26/sn#d_pýEO6p&ne\"9eZ2OrrK"Q6 #䅒MHOdyś7߿19eas.k=g IOM2)"8O›Ck$͞[ vȫ3*mN.jńcgj͜{}H?H-Mʝ[m]֫?4L&$%f~w^ V;'^ 9:b1`6s=ZT[V(rz`v) NN\& 1N{;APq—'UgĤ"sGp@aBN%8Zj$~˙T+fͅTkgks r(YUbc\AcN M:jBHwNJn}BsyI^y~(FcGHZ\5Pv$8"<wȧ[-:4~r@utj)٤&Ȟ&O:LEYUҐr{")7 3r+,܊9 (QW%u6K1SQY~%9YNb@8X`(=]C9Gho7Vvbj ͷ\_yתfoOaM|:(oJ|t"߬~M!XO8Ze1w>t`&\Tm ) Uf;$מuv <ڜHH5E>3aOUƔYV6 CEUUeTJ(]5^5#hs 1(TLUۚu1 "ά9Oq+dϩqۿ/;-?>F4*XPb@EA$N~MZ 9:%Ux)=Q 16NJ}yO//=7;dL^.wfMJfhhㇲQ۔:քS!;SFSb-swᜊR^%GL9USFsP.SB^٠Pz#c7sJWWkd싅3'+ڵ]~jxRŗEZ |W'LxfNb(ԠYY(ypFU72Ġ '6`/dlƚ)!D g)`{ou2nĎgy+Ĝ jwsAm?P{0[c!hf- cul$8#0K&,'Fo[URH CY-$XJh 8 F!a%.zWw3n<\qPtz<3"@āFn:,|G Dj2-ٞmQ&hbV,k@b&x031䅞HlKI"i#5xw_^܍yQpq1kV%Fɞhl_\4z[DJPP:jKJKdQՅhtͪ18y^c_f(KwwΞ nzJU7\s Q 0UTFniTFMAh[ 撢Q*!UZU ;-QM"|RiB)O (J9Qu8_J yrJblHefEI> %gJsLLeO, 0U)\rVɨ0_hN; T1;HL@KC[gqqFfy׉ %e2,|2!ȩuXy5ls䘣)hE\aZqmwF~1X9MG]r^r-# dRKJO,委ʅ=w763bPq8.nY40(ո>W۹q;Q[59^d8,+oSgi+lM"6]Cq_>MnG>3tkT|b\߇ ݰș}{xqEH)og6(rM%q2v Wu|PH%1R}(UgR8XwTEH% -9l#!LV|cl[eNpY4M{*P|}@?]Ʒil 3ds㓞pmu.}@!aRj=$a EOr$ջP[aTN*j՜fZM]X@MCІ#aq9#ީ'0bQG"`bWI-a$EV 1=qg"WNg_>Lȃ b:5@<|eSqmv Ηv~]Y㿪f\K8je 8@1DQ+acFIŷV 1);y-Xhm etۨQ1,e:Qt] )x q2ŵK_rlSԦv,˲deާ|_ɼy?Mf>@K6k _~Ґׯ~obb_ |IKN oRYxoO# m$(1_; s @ KO)lp 2eA~9KꬱZ _$1|y#\DU[nPwȋe9kO}%{ ݯspbQO>h%^- mpv6S5oTǦ{yX^}|y֞կoBiչ#l~ D)dǼW$> BJA(V|S,,5>:ˎ/o群qCn{IkнKʅc4q.H%Vq;FSRF(n,AKV vn0;Aφ>:{-7}1L}=!iG-\KGB+ƔC8y g4c"N= sp g X,%X̒ cBׁ i>0Mh:d2XH@`2"k/%%Tᖳh6t:q0`3{.Ǵ-@w{DNtg˼F<X<`KﶼlU8@-I' p hEgZ#d6hJsg<0l~{ֳ[<>+Rm`p$Tt@ c"" R°ľ_pt2Tԟi?I~URNrxX8&HqJU(%X')\7,ΖB QLК{8x0pQ`B3,1}B2̌l? _ \53ּ-*QVTȢ KMm}'ks>BfÐ&7hX%(rH^EE}3 n07y'0}IV,5"DL!IIh?]pQ9lLߥ}36x.ip%l ]4i)l8h*#F#Qf:hEHSSM_Zp\irݷs3^5ҥUӝ*K2'^/&5RiRbut^-̆L5 RSe*ZGd0G͌I˩j4iJ${d>Yq[w˒{p4+J8-x"RUOBEK<֟&jǒ[ 8xHDGTZ't HI{b:qW'9ȡ: 0ny0TT8(Djk Su$[0) Xaz ]*0JOp㠞hOZ0'ϫ^pɇ5C)}O <N4jv|)iz{R"_<_/e9vZhp):iI\ϖ\ʶW̿gy ;GnOlʌb})7p"EZ07-)"t|% D="PQ _utۼ&[ב p"%1v2JaQiƝVc  B' :RIXU)ӝBAj ‘C>-Rh&M NP*\ƣOccMR1@@%Q< #&h4a2X1M瓺>9,08|,sPeܿϯZ!FzV/>O f0"jιK~ M&1lj[x|^s}Y]d [ ]!Z+D)CWrGKp`}P{;+P۠乵a]CYJ+ޢW\J9$XF0GTo]{)،h3M#\riD+UiQ*4c*]`Ǣ+S8պt(- 3m)ˈU*BϽit(- Jpt3+l6tpɅ-?UG;s+ɹ*#oW\ Ԥb8GR\sӌ(dCWWd3#he Q޺:Gѕ[B6tp_t885]+@I ʀbyFtgflbWЮ3++$%&#BRdCWEE+DAD ~Etv$Bzzj;Q{(Z}-jU;cyW] \6?†%Ġumֶl lNiװm?FxbF.Xߟѹ(41oT74]Z 4VBY~mX3.;}_l]MGU|Hh;*ղDi96VeDW:BN:nV=]#]q)iKĬn>thU+DٵM.{zRKe3+h>\ ўjOv( J*²+3JA6Ѯ䶧3+Lɉ0?hE%=]!]i%%)v Ɇ"4Bts+ 9%2`n \Aq]+Dz:Gh2+lh> \ ruBtЕQ(%$.Zk0J. 
".jn-?S;mQvhىhJѱ|3݂uOjM͈ },BBWuBtut !eDWJ ]!\uh٩fDۡ䪧3+n`94gr ]!ZNW螮ΐ\ +k΅1Ȯl&tu>t%-JgDWXq ]!\r+D;jtutP:Ay>++u( %=]!]i )V, lBm Q~fjJd@eCW0Bq ]+Dz+%!U+:.秥vhՉv(qiAWEtscB5>CPKBI5E-X2sLڴ v{Gw m1fc4|!i%dX 琴l6GwlsΩlF1 8m$B,Ku>~H߮uq+7Df)%xkfwtWpM GTTZkk}Q)1F"؞RU2FyLA# %%vA7?ܠ\ jJb?pfi\Z3.#\r!2d|2B7Ÿuͷ5mtKu55(txj}RnSM5g~M>z+iZțyQ~@nOq8Xm1n1Aո*>i-?]n= 뾍`-*נPr9[BQ/ CǙbnzV/5*?5WHbKc"V}J7A N*VXq%|z]81?Q=|@LV*s,<8LˊaXyLGvi;D 51x rxW![|-7jYpKKigdou8}> ]ϗu0΁ Fiu8W%IZ1 xJO50F6sfUqg!6/͟ScMhތcE> S$8x&%q#L5p9}>rs}kkdqż@#ܭq[rh35[.Œ )rquwjc\lqFlT@)r`Wd…z1ټ?-߰ڈۘijeQ %PڛT'2 Wԁ3?؎e~oG}vԣy*_꭬F=+>33moi҄F,ՆU?;];9XPhK]!J )(54K}XY]Ώ|ˏŻ'''c2$t[|J PD9 D"uf$⨳:%%3xCmJ } {KH$FKH]<=;vhX>e5e̗QZI2eP^Abey0%7k"㓂tIAP'=_!CBȏ&a:vMe[`xF0`JY̯0{~ 'jA*]L@6+q.$7}kMx۰<?]g_|%=`E}>2+@jL`(}HB_G;-J+o&]mW&feh@{eܦSro%.ز)uD40nw^z EQ}Z{3~ޖ T7UR}ƬMn3Y?U-7Qjl٧{*ictk{ 7zx}Oez$*xշcՍο Ih)t*E9tkk@wo(c{*wJPj?.OWsB2۫V_JPXi>y|[˕Ps0No}6@Ȼ%z*^m\:NeV?hQuEwMh|Z(S-7P|ΎzΦm<ޭA;`ܭ#m'1isLwpcս+1@!fy]i/j-8R҂:AiIS7c*]$h/Kֆ2HE 1ڃ;TYK\8qf8Nܑ{|?jg}rͽlW9>9>SԮv,OYrεh#3,f#3kU.Z:27=ˍ̴I=֛vٮ{iKv.%H?W-op0\ Lr혐R@2˼K ^{Ƀ1Dk]?{۸} ~Wv]LrFЏj[kYR,yg0VS,{$[)%>M]+}aZ)}1GaGZMsqXxov9&9Ro=85~sW†" JjQ"cY[ld*ZI,X8%4ID8D(ܐq/zAEg,A?AU^*EP9!"CUAeibJ &SVV8S V+L4Q,ZӂaR%v @i+Ӝ,xGJ }ZvO'Qk.MM>ɧ@:^轲1-)ӊD&t,3%MR1'1E2EO;gzH)zzI1mޅ$f`*bזsdQ(0)#qzӊ)溏B`%z8}cx<2b26gMbX" ցcdX/y*ʠ{o;W!ig?01Ԥeٍ\պ?ɖ6xa栯M7._`IXҭZO65!d\f_)3*l pM2A5t _%%n $|fCPldDFE+:%Y'S.YI^_%OfѢ/Z}H5OŴNӁï\cq64?>R2?0'39Ye{iQ_?YǓ5jtĦrʉ-Jb zJvlrIϥ SIar=G2:%o2ڐŰ~7fߌ¿0.~D̫R%<+XTp$Keq@srd38Wq+?TlXqVoY%ߣ cE/bȌ1{'tV$qʡ"ϲ!/ikWRxÚugX{;م0krt5gUtps'p;{I͹뿑@gZnz@/4aCW׋f[yQ^iC7܇5_3m|P=MgKi;_6uD,*Ra/GCa f2z?`dJ]y {[R9J2#OzYx@{0I1o3b K2i`t:{άx6p$ڈёC#͓[<:?}\S/QL[z. BK[M8蠒 |LNG4B4.#(G+yDzr췗T ߁vRkbR @0Mi]@T $<<ƅ&b$z׎+y5ͬ+J[/zyҗ᬴hv^Vc8R<Th 0VfUeOzdN+$V)pr, &1z# 8VO 7gtp4xPH.sy>mG l$ֵdŰ|Xxln濹ڠldx=2ȔBGNni%I+M .zڠ䝓s)=F$Aۻ0oXs0+/z-LOI90"hR@Ds EXϑ*o:m ?k,ɻ,M$2GȬ*[tt8h.t;Or`'&\~\^9Np R.0^CCO"C2Hq <!Gpk_H5NM~;=jڇCTdIb%<:cP%WD暵l2 B*R&"eCY]Q!Y s6@ O_tp2&3O zc ^m:@OaZP*pg%3K ? 8MfdPj5nbgn. ??P +7v85~x[#\/fb $NrsNkl5?MÒ$OMru|\W#ሎzp3eX ʳͤK58fFރ&|,41uYxF)yJJŋG5ެ!69a0n$*T/}ҧxcmW7xL݀_,o7dfT EK*0΁4LqBNכ6 lP3r?~\!]s&fBI7` Й~D2o\'rT4Z;Q0Eor>˩^S4t~re¯~re~r4[]'Wf)\O;'G^QzVrukBPY+d(9Rͺvma:WSr\wVAfP! 
)s˽J2QĂN, O;:+KR-5yuWqXI^s`Z%DH3OLI9{B21c Nj]i,D $`xxLh-< *Ece+:g>X%aIE02}vNP< Uk#3+!e; X@Lkg6/)YxyCvm;k/{x;2BƩ.4'W h 8 ]!sj]~n~\NOvk{^g$HӫwWH.VI 1ͲvksqMT(kfY7ԓE(9rN- gQVzV)L@DI[g>![63ÒwJJ!!D&pqϜֹPB&1k7 Dc((%68Ɯ P9!h7 ښ"ClZYt&24K /Xk~7 FV; R|VJ bS.sg oDXs]`LHˠ8zQNQ+hSA:[1 ؎!2D#3itmeOL-q!cA`"+EǤQdm4kL@ɼ-YkٓϺU/G~PiwW̓)eLɪ2M$ӈQ;0:#*= or_rA"$!hȌl-\)"o?%8=hUKlW*T%Uk"-`ʲt+A!ײ(Xӧϙ־L(\; e8P)#jF٨FRwdcؔj2(SmEf2G;"c]^|w4!zn%.ϖMBF!nuukA$j::!aL6yd-V5 ZV2BJjCUT8vz,<6uŃOtڅE_o.KX:&oCc`Pәɪ?-ŖRjr /2K2EEBgA@K?2mjX{kj[ ٢G1%#\R)b,$l+fc }%z6Jn6baQ^+/64ޝ 2 zG,}xn;fg8gs{|vtzqYϼg<.yٹ>i@#N::"tP' &4RP"TH?5%tE< CQ Dɀ QJVDC1 <J XHe0V)%RZTJ$֡B1SD͆F@T,XJ̹D!Q= ҝ'yf^Sc]cyOU,~*XAp>D}3mN}|Yĭ-:A~=5:QMU&g;gwY(XJQQD)\ B9I&%P2fyNbQ% TΌ, dzH4++cQl{1OͷN>7myZMݝ 6)3QLSw?OKϺx~*2˷oNPc*+*QVCuI% *ڣN=zp̀ aFhT9Wb |!!Jr&&(d4PڲN"p )3aYTg[ofxv(:?o5?EDj4"∈w*7mao!|GTad+5 p-e̊a,6Ԣ1w0C8R j[%VT뾆̹?dԁqqޭkP5KZ͒=qQ(a, % !}qe!uE2 E>H!l +BAF\| \LyVc__E▵"IG#eg1 ]ph: LP>`l6=;˳c'kԁ|Je6dy䡨`|e \aݦIËQ΋Zo6IG!_KG{ZnZmkyT_a>Bso&gu~>.J\s>^LI??즥LP',.@s?/hw3?PwVv>'4(F!tNa J΀ei,66vzNJŒe( Y8Q٣TFe5Jjl21%I "fUR[fh/},)*6ksj:cl:6t:Y|OsNI@tK׳峃=uJ\p_IԠdur3ԁKh)=WX:yW_I3,Q<䞦#R,P%ءďɞ1T8"54Bfpi@i NJk%DH PŅT'_T2q2uGv*^0 d)(Jƿ(Eۓ}J”(w*KD<8j c!Yc!Uv3ɋ r[n}ϡ@oLx) ʤ9*0ZjY帒5E"Ys(+u~^LXW̛k)V]i@]E6 ۚe!Xw-v}.*Jc=ml.kCI̫YN=1ļǪֶ_jfy=w9Ѕ2S;JtȜE.Bp m e^A3cuՍ/2FGU6\ TUs0c 4QJ,HEVYL$,$ DI9;'4D ->' Eu3svOӳq3n(nDji3MVg14X|պE{;:ˢF oBœ62MLјl>=XalS_&DT[ޙA;UCE ePJgdY݆d J"Z׷lfΎȟSS)(RIyIQ U2d怒a&]LN:Z&3Zc|4sl=cokBW|9;E:}QD5x_Ht44,+j.cF JEK)WaYZ -Ԍl~Zz;؀|lc0T_W"E۪72X ^< g͗^t~9x6w`B̪r]h;cK%FbL{;1͎wIJHX=cfa_F,ꊀ)+Ǽq%%pw26*sL@J'O!GBx9UZherCڢg7wĖnk 1M*%QSu.4}74v,l>_FR^={5ly}ޛQ/Ooϧ{U{mq~t1Y{rw/&0IOsߗ?_լ3m]ū)AS垽aCP= %V$$VWsJbuV[OҌ_DbB{VmucV|&v ր'6AWI9UYFfUpB{q'7gr:$(253Ԯ 3H ZOGc r)I9/vo|qr>Ѷp-~MQ V0l&7+u_-Qu;-4F Eʻf;z1,=~8̉À߬RͭCuF{.vnF*&FV}E@KtEԮd0NՂI0(2e ]&Wj"nuS˕I_CCf k׷^:U/6NS}m.gj㸑 #eosl)ƣIih-HjG v; /Ŋ1َ24*nbSs 5N(05` lΐm!ݭO-W{re%E7\RksҼSI, 6-yΦŶy\dqwZK9YřrwxK&ds w&up-~4[F-ٗj⮵Xs,j;4FҪ`5L͍a IΠ{Rƌp-w/?—2mE oAb*nVȒcZ8am̔}_osɶ1kHah`<9or a\qց[ cLRmGic$kA n3u͜#an6LOL_yK6S$x\h:WxyE̵j,{O VJ,^E׊>8Q [=j@N TVz½|TFq"{sslMAFhwuTKXK EbQn68>Lq3EB>JɂAQm)-v /@Ѥ HXA쮠k}L~.=7 !Ej6w.~B2@Ơ) mކbhg$R LP4qЈDA[QS*xkAwE"ĩeB옔;?%h["Y ,P% }c>*Aέ`Ǡ=ΗA=hܛZT,g Q!] QS 0 ,FRP6Zg_#AO;]TbfvSW^랊JD;_l) y~+NX_T"{`e"8r3- Av٥ΌؤzcӔ} "|阄u7~]Ūɺc6(8i2Ƃ%;QhpD9!#r(Ⱦ\<ΰVry&zD>?Ȼ ' l3RFB$icvT S@sdI>Se@:)T|&xe0Pԇ4PQ"j䋦 ]1AoJ#HTDMkQRu;F+]7hn 2AX ktEWq*Cp_g,XOESUߙ~. _E1ܶM6/Bpi}c]0~vL`=^L9 (T%G.p /5 *nT?DЋi e@ pNJRh @z)(vlp ԁa ($ gVT @PUkNkyT [؀$>z'oJPaF [CuX}1>zFl>.>97*3)#GP%naKf/=3&4`NSH (бE>k-)[75 a[7a)=/X2ҷ 3f4t ~jPI ( HNrEHz&G@#z{ `}s8J@Vh/\oتk("Q;mɹ KKJM/o`\8`J#[U婝x,48"0~{PX\3T'pWp `Nϡ;`c{AVCHM@pU wkڨ@N-&t0oBjOH׾+EF.e[R>x -$jfawXUd S5]FZ&@-B9bԋQ@+BSTx2`plri6CJ!NJ\UKSV`$UPaY4UMYg,\hI:]MSFҧ+zG-EFF9h'Հ҃a\,n!wz-<*G0t r-uS`(%uCs Fߜ[o>o^>} at3j?ސreurvGa]ޮ/-[#j_J?pK+izִM}ՀRUp% w:qDߗԚt+)aS-{Pz تH0Klƕ YOxx7Sz95i=%8El[ VN/;xدmCn"d8Wagz7CJOzi'F&4sgǸ)ϓ`/~QPB&Qǘx2P1 HYTz2"q/6^_q,M~Dm;U!,ʶ7eۭ%Bz&P$yA-:_JMsM/3%,F3 ho7j&}D&TnMosSc^x|c\\~>!O09:ŸqA wl>oَon)޷;$TˁX[Rv}a.QE|i 꾰ҧgP֓K_l̗Fwb l!|5&'wiloi#2ax-[U\^תkS=yY|ѳ=,չPRs2l8>sL5ҋmWQ3C?f"f#F~NiV)Y8lo'\Q07G6pA~dG|u[3["sD澝}Lpv>X5`>X5`>X5`>X5`>X5`>X5`>X5`>X5`>X5`>X5`>X5`>X5EQأK vtvtXYK؎s˷][n:~Ѓ87VVKu}FS)A݃^(8;oSٱXx6^ٷ_+#ZEej&{ЧpYzi=軗tox=ں~OS+wOh;.1s?}8yQ\iԱB!7b6jĶlx}.`YP%&}M':>/;Nh"M<}jI aE-6G023|WBSP 2 }J0?]HQ=*DMw^o[9L[/>dio[_ 3bLXrFKh-9%g䌖ђ3ZrFKh-9%g䌖ђ3ZrFKh-9%g䌖ђ3ZrFKh-9%g䌖ђ3ZrFKh-9%g䌖ђ3ZrFKh-9%g䌖ђ3z&Y\gvoIΈ9#'/g+cZrFF9wzOOxk(.l^~ mlU77ӼinoBm]86OIV3YԖQ e2;h?͜ M'?a4!@5 '$M-(ՃV7j0pTz_{Z8-51Pp?ǭ}(?orqyĺw(=^OYn p/ܪߴVtJX'&`ʏ//Ronrɫ#9_~k|2׾xMy4m=ZJ"QZpz[aJ2٢vD$%[\2fG'fi<aipg<5ɹ0\EzmWۤ? 
Wſ~æZ2;p(=1ִH-O=(8_qCrJ(7n5NіK~A^iZw6}dQEjrngRr>NfpƮ-y*Y{wJN+k/{/g+hq!wB=Tm4hG'K n@qIrM% "  ػ6UuH~8$ .#,)L\WY9(/CX%NOfWT`-OtML$( GQ1K,F5&i:և_~1MP1OmVX5NMjkiOQo";3ay[Ea6O92*’N5b5jٯ?$XjzqִuNθHxzѪ^21  ia`ψzFtQb* TgMՋЋ;cXg*lyEv#F19cˋ4/@\%yˌnW 7_+|pnW 7_+|pnW 7_+|pnW 7_+|pnW 7_+|pnW 7_+|p'-t8]YxzBx/E[uc5z\'#NY8g<#;)0|-Z3vOm"?"c.BQ4<$`XtdV=>ƀp!i5]duXs9BDAQ S %o~6[vW /SRo}JFў Yza]!xMGY=EO7!cϾ$Pd4}X~]@r(im\X!t2mԒk'Z&X?]Bn88.V8Y`~ӻm١{4tv[N do|rRɱmmt93|G>z:4y?ݤNwXz 4ק4aZş@T|^𒋞{Zۖ~6?>ƟF[ ԁ|{p͞qp?=Gm\X<3? ;F\9i+Zs Ņڰ?꠭:T.iTp .Y%+d\KVp .Y%+d\KVp .Y%+d\KVp .Y%+d\KVp .Y%+d\KVp .Y%+d\KA?<Œ޵еnxR-z>ef4o  ͔ჷRr/G*%%cɽZW(үӼ-qA=ͤs7YՀwLv0!t672ꮷ[vs*Z 7բ'EpƊ7Z:fFl._.iuœR,RhxGJM/R-:I!9wOά{H? GraNzLdO3W  #JFQB\d":ŪC9а܉o5YscUUR']4*H@x!c遐;璯DH0('W(`t2MظM/gӰܠ01IC{ _t0O&S/+Xg65 CF’q.$C;:Gt~cm{ƌU}-ѥ`8l\DM*ߦ`)PL+0>& +-ƣp8SLYx` Ї0@^L钌B_ׁwk<(iG͚)A l꼽M`˂ӆNSnk i"PAWmPy4=!&J_ ?O lѠ6v r61dz<L-SQ쇨ʒ~PYN&[_i{`"w6fЭ=57d6N_atf1z:G=l ~eBȦçz0x!3x|˫{3<G}$. FyX2*sF#%c !Ɯ,O:I׎F"IRzId.;uF΁mަlz= 8ܗcK.ìMob71kvWlO~}eQcNPi Ԡ1$q9Kg6@")x@TP;#go9F=okfz<>•WTu\6o^pොSfrnzaPc46Vt؟"Li`C>nyG;Ms^Q -܃*PJECKxJ* k q΁yK=]O7g<Īzy56xB4r'V=B&0T (R0i̓ C'uU=S9޼ԃH2?Ld 6 zl JcՖQbv{Y3W"M-2{х>^du ~#ّԅGn=.M߸7QedaT3Zb{'#Uag()c 쭒65ÿˌ@&haUQib$#6_V1We pN~9x/Hoa˟t &`$[ƴ(0Ro0/i7x2nX6wk;eYf. "nH0Lq#͎qZ9w/v(j֯TxfwU)~Hl[. JVQ\^Cg<,f7 .z[\=5sXTOǷ,,-Ha_iɞ|C8mlAirc # 􀴱'  h5Sڗ'z?y ˟ӖOo=ɤt\Ozr6Ic#V+NIb+,&k^p9o$ i~ZI_8٥O- '7 ixit]Γ_pQ?,R{oXL1Tɯz>,C+ }g.x>a~|kU~ >}~#tF?o7w:"@xʾ9n7W0NcφFyF8vDW$P$J0Hzc_ r4jHnBs l6R??c9t۪p?e N{dЉoq}tMn@f-2&.4;S+*ZKO6[ CJBm p5- \_mr,{F/6[A;L"#.1A_D5I0zZ^TMciŻ)= {ɺTj-'Um< jFh9Ex:t h >'6w ӄc91 mqb"I B jg+s0j2vFG%Swl.5l}sK-/woެI8;9>n|@iXˠWp߹ {p,/u`2O_m iwqڂ#2:z݁9yR3SQ3Q3n׌S[\ۣ3f7Z%RSL(mB$<aBM")d$S-Q>piԚ(c$rPb`pol`eR11i5loz%Qt|9 +mz6+r۰CJ#nu=[^>ѫzeY`YMyH.84j?ajr ƈm,yb. yQ{db5^0j'a6sM< Xap"+dFSA"AHK Wς㢰l(-pz M$oK1II`ޢ%A[EC*hPA*+wTݏB5"H) D&dL脩*D0׌A3.`3JsTAVI0cv Ě=l;Eg":8t+& WDm?m*&/"QC{6b83 F3twY{=l2{RҵJ0~2/NUMOZ"|F =Tt+k,,F!ݗ3qR3Nї+/;6=?գ%)GץNTګ^xVf-jF7EX ʫB4?4!6 h6q!/yp}%H/Ww.Ƈ@6J {0U RG0'6n|7 NS& gԀa7(N.&Z+)n6K~x16w{i-5$'@w`=u,KfrX[x/r~iYT3Ѽ0UWϱÚXW9zVQFuuwr|]mo#q+>ͪ_,8Clqlu%KHjTϐ%lI|XILW=Tw̋pR{yB1Lػ d .tiXxC˫73OTk9MuE%$ )wm~ Fї -2<Zw6<\sF˕UJ[-]mO"oY3,~䍫HTʦN?-"~cME"7Mk=ZBN89"ŋ%Cɔ`z 2 lV" U{oRM'7!ۑ5M+0a%gpwFyYp0ԋ'lxN*4549xu38(fD) Uggpt{ f7C1PRG*y6~[h`0A[*yM}Fe]QY(|7\8;c@yVY47m~Ÿ[k}4w觿@')fEni:I{ȟ VΧzv٫鴽1l1^̈zx>Nиǩ[ZeH_ < y!)~*X+5Q2,O|7_^\]a+_%PFlt0_~M꺹\S  @]Aj $pP=􉐲X4FJpbnJ) cbmd&$,6V+hJЁSϩ0T=zK o@`[t%|u2O>\XPfik,-ɾ/-)҂>"*+<*" \i ;\)7\Sk᪈+fHtHq7W 5c x \zp_W$Au4pEZv4pU_R@:[+i;~rJ%e4>: |RJ Nmۢ?eaU@! 